diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 82efcf6558410..e2f6a79affc53 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,14 +1,19 @@ # Notify all committers of DB migration changes, per SIP-59 + # https://github.com/apache/superset/issues/13351 + /superset/migrations/ @apache/superset-committers # Notify Preset team when ephemeral env settings are changed + .github/workflows/ecs-task-definition.json @robdiciuccio @craig-rueda @rusackas @eschutho @dpgaspar @nytai @mistercrunch .github/workflows/docker-ephemeral-env.yml @robdiciuccio @craig-rueda @rusackas @eschutho @dpgaspar @nytai @mistercrunch .github/workflows/ephemeral*.yml @robdiciuccio @craig-rueda @rusackas @eschutho @dpgaspar @nytai @mistercrunch # Notify some committers of changes in the Select component -/superset-frontend/src/components/Select/ @michael-s-molina @geido + +/superset-frontend/src/components/Select/ @michael-s-molina @geido @ktmud # Notify Helm Chart maintainers about changes in it + /helm/superset/ @craig-rueda @dpgaspar @villebro diff --git a/.github/workflows/superset-e2e.yml b/.github/workflows/superset-e2e.yml index be0df99551a40..e99c5ee05ef51 100644 --- a/.github/workflows/superset-e2e.yml +++ b/.github/workflows/superset-e2e.yml @@ -31,7 +31,7 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} services: postgres: - image: postgres:10-alpine + image: postgres:14-alpine env: POSTGRES_USER: superset POSTGRES_PASSWORD: superset diff --git a/.github/workflows/superset-python-integrationtest.yml b/.github/workflows/superset-python-integrationtest.yml index a5a7705bcd88e..926d6185bf4e8 100644 --- a/.github/workflows/superset-python-integrationtest.yml +++ b/.github/workflows/superset-python-integrationtest.yml @@ -88,7 +88,7 @@ jobs: SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset services: postgres: - image: postgres:10-alpine + image: postgres:14-alpine env: POSTGRES_USER: superset POSTGRES_PASSWORD: superset diff --git a/.github/workflows/superset-python-presto-hive.yml b/.github/workflows/superset-python-presto-hive.yml index 3a4022d893eef..097b2f45adf9b 100644 --- a/.github/workflows/superset-python-presto-hive.yml +++ b/.github/workflows/superset-python-presto-hive.yml @@ -23,7 +23,7 @@ jobs: SUPERSET__SQLALCHEMY_EXAMPLES_URI: presto://localhost:15433/memory/default services: postgres: - image: postgres:10-alpine + image: postgres:14-alpine env: POSTGRES_USER: superset POSTGRES_PASSWORD: superset @@ -101,7 +101,7 @@ jobs: UPLOAD_FOLDER: /tmp/.superset/uploads/ services: postgres: - image: postgres:10-alpine + image: postgres:14-alpine env: POSTGRES_USER: superset POSTGRES_PASSWORD: superset diff --git a/README.md b/README.md index 9d95180f26370..5bea6eed78ba0 100644 --- a/README.md +++ b/README.md @@ -65,7 +65,7 @@ Superset provides: **Large Gallery of Visualizations** -
+
**Craft Beautiful, Dynamic Dashboards** diff --git a/UPDATING.md b/UPDATING.md index 2915976bcd66a..fb6565848a164 100644 --- a/UPDATING.md +++ b/UPDATING.md @@ -30,27 +30,25 @@ assists people when migrating to a new version. ### Breaking Changes -- [19230](https://github.com/apache/superset/pull/19230): The `ROW_LEVEL_SECURITY` feature flag has been removed (permanently enabled). Any deployments which had set this flag to false will need to verify that the presence of the Row Level Security feature does not interfere with their use case. -- [19168](https://github.com/apache/superset/pull/19168): Celery upgrade to 5.X has breaking changes on it's command line invocation. - Please follow: https://docs.celeryq.dev/en/stable/whatsnew-5.2.html#step-1-adjust-your-command-line-invocation - Consider migrating you celery config if you haven't already: https://docs.celeryq.dev/en/stable/userguide/configuration.html#conf-old-settings-map -- [19049](https://github.com/apache/superset/pull/19049): APP_ICON_WIDTH has been removed from the config. Superset should now be able to handle different logo sizes without having to explicitly set an APP_ICON_WIDTH. This might affect the size of existing custom logos as the UI will now resize them according to the specified space of maximum 148px and not according to the value of APP_ICON_WIDTH. -- [19274](https://github.com/apache/superset/pull/19274): The `PUBLIC_ROLE_LIKE_GAMMA` config key has been removed, set `PUBLIC_ROLE_LIKE` = "Gamma" to have the same functionality. -- [19273](https://github.com/apache/superset/pull/19273): The `SUPERSET_CELERY_WORKERS` and `SUPERSET_WORKERS` config keys has been removed. Configure celery directly using `CELERY_CONFIG` on Superset -- [19231](https://github.com/apache/superset/pull/19231): The `ENABLE_REACT_CRUD_VIEWS` feature flag has been removed (permanently enabled). Any deployments which had set this flag to false will need to verify that the React views support their use case. -- [17556](https://github.com/apache/superset/pull/17556): Bumps mysqlclient from v1 to v2 -- [19113](https://github.com/apache/superset/pull/19113): The `ENABLE_JAVASCRIPT_CONTROLS` setting has moved from app config to a feature flag. Any deployments who overrode this setting will now need to override the feature flag from here onward. -- [17881](https://github.com/apache/superset/pull/17881): Previously simple adhoc filter values on string columns were stripped of enclosing single and double quotes. To fully support literal quotes in filters, both single and double quotes will no longer be removed from filter values. -- [17984](https://github.com/apache/superset/pull/17984): Default Flask SECRET_KEY has changed for security reasons. You should always override with your own secret. Set `PREVIOUS_SECRET_KEY` (ex: PREVIOUS_SECRET_KEY = "\2\1thisismyscretkey\1\2\\e\\y\\y\\h") with your previous key and use `superset re-encrypt-secrets` to rotate you current secrets -- [15254](https://github.com/apache/superset/pull/15254): Previously `QUERY_COST_FORMATTERS_BY_ENGINE`, `SQL_VALIDATORS_BY_ENGINE` and `SCHEDULED_QUERIES` were expected to be defined in the feature flag dictionary in the `config.py` file. These should now be defined as a top-level config, with the feature flag dictionary being reserved for boolean only values. 
-- [17539](https://github.com/apache/superset/pull/17539): all Superset CLI commands (init, load_examples and etc) require setting the FLASK_APP environment variable (which is set by default when `.flaskenv` is loaded)
-- [18970](https://github.com/apache/superset/pull/18970): Changes feature
-flag for the legacy datasource editor (DISABLE_LEGACY_DATASOURCE_EDITOR) in config.py to True, thus disabling the feature from being shown in the client.
-- [19083](https://github.com/apache/superset/pull/19083): Updates the mutator function in the config file to take a sql argument and a list of kwargs. Any `SQL_QUERY_MUTATOR` config function overrides will need to be updated to match the new set of params. It is advised regardless of the dictionary args that you list in your function arguments, to keep **kwargs as the last argument to allow for any new kwargs to be passed in.
+- [19274](https://github.com/apache/superset/pull/19274): The `PUBLIC_ROLE_LIKE_GAMMA` config key has been removed; set `PUBLIC_ROLE_LIKE = "Gamma"` to get the same functionality.
+- [19273](https://github.com/apache/superset/pull/19273): The `SUPERSET_CELERY_WORKERS` and `SUPERSET_WORKERS` config keys have been removed. Configure Celery directly using `CELERY_CONFIG` in Superset.
+- [19262](https://github.com/apache/superset/pull/19262): Per [SIP-11](https://github.com/apache/superset/issues/6032) and [SIP-68](https://github.com/apache/superset/issues/14909), the native NoSQL Druid connector is deprecated and will no longer be supported. Druid SQL is still [supported](https://superset.apache.org/docs/databases/druid).
+- [19231](https://github.com/apache/superset/pull/19231): The `ENABLE_REACT_CRUD_VIEWS` feature flag has been removed (permanently enabled). Any deployments which had set this flag to false will need to verify that the React views support their use case.
+- [19230](https://github.com/apache/superset/pull/19230): The `ROW_LEVEL_SECURITY` feature flag has been removed (permanently enabled). Any deployments which had set this flag to false will need to verify that the presence of the Row Level Security feature does not interfere with their use case.
+- [19168](https://github.com/apache/superset/pull/19168): The Celery upgrade to 5.X resulted in breaking changes to its command line invocation. Please follow [these](https://docs.celeryq.dev/en/stable/whatsnew-5.2.html#step-1-adjust-your-command-line-invocation) instructions for adjustments. Also consider migrating your Celery config per [here](https://docs.celeryq.dev/en/stable/userguide/configuration.html#conf-old-settings-map).
+- [19142](https://github.com/apache/superset/pull/19142): The `VERSIONED_EXPORT` config key is now `True` by default.
+- [19113](https://github.com/apache/superset/pull/19113): The `ENABLE_JAVASCRIPT_CONTROLS` config key has moved from an app config to a feature flag. Any deployments that overrode this setting will now need to override the feature flag from here onward.
+- [19107](https://github.com/apache/superset/pull/19107): The `SQLLAB_BACKEND_PERSISTENCE` feature flag is now `True` by default, which enables persisting SQL Lab tabs in the backend instead of the browser's `localStorage`.
+- [19083](https://github.com/apache/superset/pull/19083): Updates the mutator function in the config file to take a SQL argument and a list of kwargs.
Any `SQL_QUERY_MUTATOR` config function overrides will need to be updated to match the new set of params. Regardless of which arguments you list in your function signature, it is advised to keep `**kwargs` as the last parameter so that any newly added kwargs can be passed in (see the sketch after this list).
+- [19049](https://github.com/apache/superset/pull/19049): The `APP_ICON_WIDTH` config key has been removed. Superset should now be able to handle different logo sizes without having to explicitly set an `APP_ICON_WIDTH`. This might affect the size of existing custom logos, as the UI will now resize them according to the specified space of maximum 148px and not according to the value of `APP_ICON_WIDTH`.
- [19017](https://github.com/apache/superset/pull/19017): Removes Python 3.7 support.
+- [18976](https://github.com/apache/superset/pull/18976): When running the app in debug mode, the app will default to using `SimpleCache` for `FILTER_STATE_CACHE_CONFIG` and `EXPLORE_FORM_DATA_CACHE_CONFIG`. When running in non-debug mode, a cache backend will need to be defined, otherwise the application will fail to start. For installations using Redis or other caching backends, it is recommended to use the same backend for both cache configs.
+- [18970](https://github.com/apache/superset/pull/18970): The `DISABLE_LEGACY_DATASOURCE_EDITOR` feature flag is now `True` by default, which disables the legacy datasource editor from being shown in the client.
+- [17984](https://github.com/apache/superset/pull/17984): The default Flask SECRET_KEY has changed for security reasons. You should always override it with your own secret. Set `PREVIOUS_SECRET_KEY`, e.g. `PREVIOUS_SECRET_KEY = "\2\1thisismyscretkey\1\2\\e\\y\\y\\h"`, to your previous key and use `superset re-encrypt-secrets` to rotate your current secrets.
+- [17881](https://github.com/apache/superset/pull/17881): Previously, simple adhoc filter values on string columns were stripped of enclosing single and double quotes. To fully support literal quotes in filters, both single and double quotes will no longer be removed from filter values.
+- [17556](https://github.com/apache/superset/pull/17556): Bumps `mysqlclient` from v1 to v2.
+- [17539](https://github.com/apache/superset/pull/17539): All Superset CLI commands, e.g. `init`, `load_examples`, etc., require setting the `FLASK_APP` environment variable (which is set by default when `.flaskenv` is loaded).
+- [15254](https://github.com/apache/superset/pull/15254): The `QUERY_COST_FORMATTERS_BY_ENGINE`, `SQL_VALIDATORS_BY_ENGINE` and `SCHEDULED_QUERIES` feature flags are now defined as config keys, given that feature flags are reserved for boolean-only values.
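For the `SQL_QUERY_MUTATOR` change in 19083 above, a minimal sketch of an override matching the new signature; the comment tag is purely illustrative, and the exact kwargs Superset passes are not enumerated here:

```python
# superset_config.py -- a minimal sketch of a post-19083 SQL_QUERY_MUTATOR.
# The tag below is illustrative only; keeping **kwargs as the last parameter
# lets kwargs added by future Superset versions pass through without
# breaking this override.
def SQL_QUERY_MUTATOR(sql, **kwargs):
    return f"-- tagged by superset_config\n{sql}"
```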
### Potential Downtime

diff --git a/docker-compose.yml b/docker-compose.yml
index b7bf745ad6ff6..907ca51129caa 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -40,7 +40,7 @@ services:
   db:
     env_file: docker/.env
-    image: postgres:10
+    image: postgres:14
     container_name: superset_db
     restart: unless-stopped
     ports:
diff --git a/docs/docs/installation/configuring-superset.mdx b/docs/docs/installation/configuring-superset.mdx
index 86bddda180f30..1384b62741cba 100644
--- a/docs/docs/installation/configuring-superset.mdx
+++ b/docs/docs/installation/configuring-superset.mdx
@@ -69,6 +69,32 @@ you can add the endpoints to `WTF_CSRF_EXEMPT_LIST`:
 WTF_CSRF_EXEMPT_LIST = ['']
 ```
+### Using a production metastore
+
+By default, Superset is configured to use SQLite; it's a simple and fast way to get started
+(no installation needed), but for production environments you should use a different database engine
+on a separate host or container.
+
+Superset supports the following database engines/versions:
+
+| Database Engine                           | Supported Versions           |
+| ----------------------------------------- | ---------------------------- |
+| [PostgreSQL](https://www.postgresql.org/) | 10.X, 11.X, 12.X, 13.X, 14.X |
+| [MySQL](https://www.mysql.com/)           | 5.X                          |
+
+Use the following database drivers and connection strings:
+
+| Database                                  | PyPI package                 | Connection String                                                      |
+| ----------------------------------------- | ---------------------------- | ---------------------------------------------------------------------- |
+| [PostgreSQL](https://www.postgresql.org/) | `pip install psycopg2`       | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
+| [MySQL](https://www.mysql.com/)           | `pip install mysqlclient`    | `mysql://<UserName>:<DBPassword>@<Database Host>/<Database Name>`      |
+| SQLite                                    | No additional library needed | `sqlite://`                                                            |
+
+To configure the Superset metastore, set the `SQLALCHEMY_DATABASE_URI` config key in `superset_config`
+to the appropriate connection string.
+
 ### Running on a WSGI HTTP Server

While you can run Superset on NGINX or Apache, we recommend using Gunicorn in async mode. This
diff --git a/docs/docs/installation/installing-superset-using-docker-compose.mdx b/docs/docs/installation/installing-superset-using-docker-compose.mdx
index 4d7056a165d8b..ced6ba5660a3b 100644
--- a/docs/docs/installation/installing-superset-using-docker-compose.mdx
+++ b/docs/docs/installation/installing-superset-using-docker-compose.mdx
@@ -109,7 +109,7 @@
 username: admin
 password: admin
 ```
-### 5. Connecting your local database instance to superset
+### 5. Connecting Superset to your local database instance

When running Superset using `docker` or `docker-compose`, it runs in its own Docker container, as if Superset were running on a separate machine entirely. Therefore, attempts to connect to your local database with hostname `localhost` won't work, as `localhost` refers to the Docker container Superset is running in, not your actual host machine. Fortunately, Docker provides an easy way to access network resources on the host machine from inside a container, and we will leverage this capability to connect to our local database instance.
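To make the metastore and Docker networking notes above concrete, here is a hedged sketch of a `superset_config` entry; the credentials, port, and database name are placeholders, and `host.docker.internal` is Docker Desktop's alias for the host machine (on Linux the bridge gateway address may be needed instead):

```python
# superset_config.py -- illustrative only; substitute your real host and
# credentials. When Superset runs inside Docker and PostgreSQL runs on the
# host machine, "host.docker.internal" typically stands in for "localhost".
SQLALCHEMY_DATABASE_URI = (
    "postgresql+psycopg2://superset:superset@host.docker.internal:5432/superset"
)
```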
diff --git a/docs/yarn.lock b/docs/yarn.lock index f58b8a5078d35..e5d08fa18c5a6 100644 --- a/docs/yarn.lock +++ b/docs/yarn.lock @@ -3862,9 +3862,9 @@ async-validator@^4.0.2: integrity sha512-Pj2IR7u8hmUEDOwB++su6baaRi+QvsgajuFB9j95foM1N2gy5HM4z60hfusIO0fBPG5uLAEl6yCJr1jNSVugEQ== async@^2.6.2: - version "2.6.3" - resolved "https://registry.npmjs.org/async/-/async-2.6.3.tgz" - integrity sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg== + version "2.6.4" + resolved "https://registry.yarnpkg.com/async/-/async-2.6.4.tgz#706b7ff6084664cd7eae713f6f965433b5504221" + integrity sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA== dependencies: lodash "^4.17.14" diff --git a/helm/superset/Chart.yaml b/helm/superset/Chart.yaml index 64600f5973ed4..2ac78630149b5 100644 --- a/helm/superset/Chart.yaml +++ b/helm/superset/Chart.yaml @@ -22,10 +22,10 @@ maintainers: - name: craig-rueda email: craig@craigrueda.com url: https://github.com/craig-rueda -version: 0.5.11 +version: 0.6.0 dependencies: - name: postgresql - version: 10.2.0 + version: 11.1.22 repository: https://charts.bitnami.com/bitnami condition: postgresql.enabled - name: redis diff --git a/superset-frontend/cypress-base/cypress/integration/chart_list/list_view.test.ts b/superset-frontend/cypress-base/cypress/integration/chart_list/list_view.test.ts index 6da5d90106d15..42313d78495f4 100644 --- a/superset-frontend/cypress-base/cypress/integration/chart_list/list_view.test.ts +++ b/superset-frontend/cypress-base/cypress/integration/chart_list/list_view.test.ts @@ -21,11 +21,12 @@ import { CHART_LIST } from './chart_list.helper'; describe('chart list view', () => { beforeEach(() => { cy.login(); - cy.visit(CHART_LIST); - cy.get('[aria-label="list-view"]').click(); }); it('should load rows', () => { + cy.visit(CHART_LIST); + cy.get('[aria-label="list-view"]').click(); + cy.get('[data-test="listview-table"]').should('be.visible'); // check chart list view header cy.get('[data-test="sort-header"]').eq(1).contains('Chart'); @@ -49,6 +50,17 @@ describe('chart list view', () => { }); it('should bulk delete correctly', () => { + // Load the chart list order by name asc. 
+ // This will ensure the tests stay consistent, and the + // same charts get deleted every time + cy.visit(CHART_LIST, { + qs: { + sortColumn: 'slice_name', + sortOrder: 'asc', + }, + }); + cy.get('[aria-label="list-view"]').click(); + cy.get('[data-test="listview-table"]').should('be.visible'); cy.get('[data-test="bulk-select"]').eq(0).click(); cy.get('[aria-label="checkbox-off"]').eq(1).siblings('input').click(); diff --git a/superset-frontend/cypress-base/cypress/integration/explore/control.test.ts b/superset-frontend/cypress-base/cypress/integration/explore/control.test.ts index 271f86e9ed3b8..97dfd2945aaf5 100644 --- a/superset-frontend/cypress-base/cypress/integration/explore/control.test.ts +++ b/superset-frontend/cypress-base/cypress/integration/explore/control.test.ts @@ -121,14 +121,12 @@ describe('Test datatable', () => { cy.visitChartByName('Daily Totals'); }); it('Data Pane opens and loads results', () => { - cy.get('[data-test="data-tab"]').click(); + cy.contains('Results').click(); cy.get('[data-test="row-count-label"]').contains('26 rows retrieved'); - cy.contains('View results'); cy.get('.ant-empty-description').should('not.exist'); }); it('Datapane loads view samples', () => { - cy.get('[data-test="data-tab"]').click(); - cy.contains('View samples').click(); + cy.contains('Samples').click(); cy.get('[data-test="row-count-label"]').contains('1k rows retrieved'); cy.get('.ant-empty-description').should('not.exist'); }); diff --git a/superset-frontend/cypress-base/package-lock.json b/superset-frontend/cypress-base/package-lock.json index d3a3153eff7f0..eb4367a35e1fa 100644 --- a/superset-frontend/cypress-base/package-lock.json +++ b/superset-frontend/cypress-base/package-lock.json @@ -2593,9 +2593,9 @@ } }, "node_modules/async": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/async/-/async-3.2.0.tgz", - "integrity": "sha512-TR2mEZFVOj2pLStYxLht7TyfuRzaydfpxr3k9RpHIzMgw7A64dzsdqCxH1WJyQdoe8T10nDXd9wnEigmiuHIZw==" + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.3.tgz", + "integrity": "sha512-spZRyzKL5l5BZQrr/6m/SqFdBN0q3OCI0f9rjfBzCMBIP4p75P620rR3gTmaksNOhmzgdxcaxdNfMy6anrbM0g==" }, "node_modules/asynckit": { "version": "0.4.0", @@ -10487,9 +10487,9 @@ "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==" }, "async": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/async/-/async-3.2.0.tgz", - "integrity": "sha512-TR2mEZFVOj2pLStYxLht7TyfuRzaydfpxr3k9RpHIzMgw7A64dzsdqCxH1WJyQdoe8T10nDXd9wnEigmiuHIZw==" + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.3.tgz", + "integrity": "sha512-spZRyzKL5l5BZQrr/6m/SqFdBN0q3OCI0f9rjfBzCMBIP4p75P620rR3gTmaksNOhmzgdxcaxdNfMy6anrbM0g==" }, "asynckit": { "version": "0.4.0", diff --git a/superset-frontend/package-lock.json b/superset-frontend/package-lock.json index 4cf5cdba13950..046e1536e87d5 100644 --- a/superset-frontend/package-lock.json +++ b/superset-frontend/package-lock.json @@ -245,7 +245,7 @@ "jsdom": "^16.4.0", "lerna": "^4.0.0", "less": "^3.12.2", - "less-loader": "^5.0.0", + "less-loader": "^10.2.0", "mini-css-extract-plugin": "^2.3.0", "mock-socket": "^9.0.3", "node-fetch": "^2.6.1", @@ -2705,6 +2705,29 @@ "node": ">=0.1.95" } }, + "node_modules/@cspotcode/source-map-consumer": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-consumer/-/source-map-consumer-0.8.0.tgz", + "integrity": 
"sha512-41qniHzTU8yAGbCp04ohlmSrZf8bkf/iJsl3V0dRGsQN/5GFfx+LbCSsCpp2gqrqjTVg/K6O8ycoV35JIwAzAg==", + "dev": true, + "peer": true, + "engines": { + "node": ">= 12" + } + }, + "node_modules/@cspotcode/source-map-support": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.7.0.tgz", + "integrity": "sha512-X4xqRHqN8ACt2aHVe51OxeA2HjbcL4MqFqXkrmQszJ1NOUuUu5u6Vqx/0lZSVNku7velL5FC/s5uEAj1lsBMhA==", + "dev": true, + "peer": true, + "dependencies": { + "@cspotcode/source-map-consumer": "0.8.0" + }, + "engines": { + "node": ">=12" + } + }, "node_modules/@ctrl/tinycolor": { "version": "3.3.1", "resolved": "https://registry.npmjs.org/@ctrl/tinycolor/-/tinycolor-3.3.1.tgz", @@ -21907,10 +21930,6 @@ "resolved": "plugins/legacy-plugin-chart-event-flow", "link": true }, - "node_modules/@superset-ui/legacy-plugin-chart-force-directed": { - "resolved": "plugins/legacy-plugin-chart-force-directed", - "link": true - }, "node_modules/@superset-ui/legacy-plugin-chart-heatmap": { "resolved": "plugins/legacy-plugin-chart-heatmap", "link": true @@ -22410,6 +22429,34 @@ "node": ">=10.13.0" } }, + "node_modules/@tsconfig/node10": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.8.tgz", + "integrity": "sha512-6XFfSQmMgq0CFLY1MslA/CPUfhIL919M1rMsa5lP2P097N2Wd1sSX0tx1u4olM16fLNhtHZpRhedZJphNJqmZg==", + "dev": true, + "peer": true + }, + "node_modules/@tsconfig/node12": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.9.tgz", + "integrity": "sha512-/yBMcem+fbvhSREH+s14YJi18sp7J9jpuhYByADT2rypfajMZZN4WQ6zBGgBKp53NKmqI36wFYDb3yaMPurITw==", + "dev": true, + "peer": true + }, + "node_modules/@tsconfig/node14": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.1.tgz", + "integrity": "sha512-509r2+yARFfHHE7T6Puu2jjkoycftovhXRqW328PDXTVGKihlb1P8Z9mMZH04ebyajfRY7dedfGynlrFHJUQCg==", + "dev": true, + "peer": true + }, + "node_modules/@tsconfig/node16": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.2.tgz", + "integrity": "sha512-eZxlbI8GZscaGS7kkc/trHTT5xgrjH3/1n2JDwusC9iahPKWMRvRjJSAN5mCXviuTGQ/lHnhvv8Q1YTpnfz9gA==", + "dev": true, + "peer": true + }, "node_modules/@types/aria-query": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/@types/aria-query/-/aria-query-4.2.0.tgz", @@ -24408,9 +24455,9 @@ "integrity": "sha512-NBOQlm9+7RBqRqZwimpgquaLeTJFayqb9UEPtTkpC3TkkwDnlsT/TwsCC0svjt9kEZ6G9mH5AEOHSz6Q/HrzQQ==" }, "node_modules/acorn": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.1.tgz", - "integrity": "sha512-add7dgA5ppRPxCFJoAGfMDi7PIBXq1RtGo7BhbLaxwrXPOmw8gq48Y9ozT01hUKy9byMjlR20EJhu5zlkErEkg==", + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.0.tgz", + "integrity": "sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ==", "bin": { "acorn": "bin/acorn" }, @@ -24428,6 +24475,18 @@ "acorn-walk": "^7.1.1" } }, + "node_modules/acorn-globals/node_modules/acorn": { + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, "node_modules/acorn-import-assertions": { "version": "1.7.6", "resolved": 
"https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.7.6.tgz", @@ -25004,6 +25063,19 @@ "resolved": "https://registry.npmjs.org/app-root-dir/-/app-root-dir-1.0.2.tgz", "integrity": "sha1-OBh+wt6nV3//Az/8sSFyaS/24Rg=" }, + "node_modules/append-transform": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/append-transform/-/append-transform-2.0.0.tgz", + "integrity": "sha512-7yeyCEurROLQJFv5Xj4lEGTy0borxepjFv1g22oAdqFu//SrAlDl1O1Nxx15SH1RoliUml6p8dwJW9jvZughhg==", + "dev": true, + "peer": true, + "dependencies": { + "default-require-extensions": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/aproba": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", @@ -25030,6 +25102,13 @@ ], "peer": true }, + "node_modules/archy": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/archy/-/archy-1.0.0.tgz", + "integrity": "sha1-+cjBN1fMHde8N5rHeyxipcKGjEA=", + "dev": true, + "peer": true + }, "node_modules/are-we-there-yet": { "version": "1.1.7", "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-1.1.7.tgz", @@ -25040,6 +25119,13 @@ "readable-stream": "^2.0.6" } }, + "node_modules/arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + "dev": true, + "peer": true + }, "node_modules/argparse": { "version": "1.0.10", "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", @@ -25519,12 +25605,12 @@ } }, "node_modules/babel-loader": { - "version": "8.2.2", - "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-8.2.2.tgz", - "integrity": "sha512-JvTd0/D889PQBtUXJ2PXaKU/pjZDMtHA9V2ecm+eNRmmBCMR09a+fmpGTNwnJtFmFl5Ei7Vy47LjBb+L0wQ99g==", + "version": "8.2.5", + "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-8.2.5.tgz", + "integrity": "sha512-OSiFfH89LrEMiWd4pLNqGz4CwJDtbs2ZVc+iGu2HrkRfPxId9F2anQj38IxWpmRfsUY0aBZYi1EFcd3mhtRMLQ==", "dependencies": { "find-cache-dir": "^3.3.1", - "loader-utils": "^1.4.0", + "loader-utils": "^2.0.0", "make-dir": "^3.1.0", "schema-utils": "^2.6.5" }, @@ -25564,6 +25650,30 @@ "node": ">=8" } }, + "node_modules/babel-loader/node_modules/json5": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.1.tgz", + "integrity": "sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/babel-loader/node_modules/loader-utils": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.2.tgz", + "integrity": "sha512-TM57VeHptv569d/GKh6TAYdzKblwDNiumOdkFnejjD0XwTH87K90w3O7AiJRqdQoXygvi1VQTJTLGhJl7WqA7A==", + "dependencies": { + "big.js": "^5.2.2", + "emojis-list": "^3.0.0", + "json5": "^2.1.2" + }, + "engines": { + "node": ">=8.9.0" + } + }, "node_modules/babel-loader/node_modules/locate-path": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", @@ -27119,6 +27229,48 @@ "node": ">=6" } }, + "node_modules/caching-transform": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/caching-transform/-/caching-transform-4.0.0.tgz", + "integrity": "sha512-kpqOvwXnjjN44D89K5ccQC+RUrsy7jB/XLlRrx0D7/2HNcTPqzsb6XgYoErwko6QsV184CA2YgS1fxDiiDZMWA==", + "dev": true, + "peer": true, + "dependencies": { + 
"hasha": "^5.0.0", + "make-dir": "^3.0.0", + "package-hash": "^4.0.0", + "write-file-atomic": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/caching-transform/node_modules/make-dir": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dev": true, + "peer": true, + "dependencies": { + "semver": "^6.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/caching-transform/node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true, + "peer": true, + "bin": { + "semver": "bin/semver.js" + } + }, "node_modules/call-bind": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", @@ -29575,6 +29727,13 @@ "react": "^0.14.0 || ^15.0.0 || ^16.0.0" } }, + "node_modules/create-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", + "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", + "dev": true, + "peer": true + }, "node_modules/cross-env": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/cross-env/-/cross-env-5.2.0.tgz", @@ -31622,6 +31781,29 @@ "node": ">= 8" } }, + "node_modules/default-require-extensions": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/default-require-extensions/-/default-require-extensions-3.0.0.tgz", + "integrity": "sha512-ek6DpXq/SCpvjhpFsLFRVtIxJCRw6fUR42lYMVZuUMK7n8eMz4Uh5clckdBjEpLhn/gEBZo7hDJnJcwdKLKQjg==", + "dev": true, + "peer": true, + "dependencies": { + "strip-bom": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/default-require-extensions/node_modules/strip-bom": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", + "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", + "dev": true, + "peer": true, + "engines": { + "node": ">=8" + } + }, "node_modules/defaults": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/defaults/-/defaults-1.0.3.tgz", @@ -32905,6 +33087,13 @@ "node": ">=0.4.0" } }, + "node_modules/es6-error": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/es6-error/-/es6-error-4.1.1.tgz", + "integrity": "sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==", + "dev": true, + "peer": true + }, "node_modules/es6-shim": { "version": "0.35.6", "resolved": "https://registry.npmjs.org/es6-shim/-/es6-shim-0.35.6.tgz", @@ -34534,6 +34723,17 @@ "node": ">=0.4.0" } }, + "node_modules/falafel/node_modules/acorn": { + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, "node_modules/falafel/node_modules/isarray": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", @@ -36929,6 +37129,36 @@ "minimalistic-assert": "^1.0.1" } }, + "node_modules/hasha": { + 
"version": "5.2.2", + "resolved": "https://registry.npmjs.org/hasha/-/hasha-5.2.2.tgz", + "integrity": "sha512-Hrp5vIK/xr5SkeN2onO32H0MgNZ0f17HRNH39WfL0SYUNOTZ5Lz1TJ8Pajo/87dYGEFlLMm7mIc/k/s6Bvz9HQ==", + "dev": true, + "peer": true, + "dependencies": { + "is-stream": "^2.0.0", + "type-fest": "^0.8.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/hasha/node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "peer": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/hast-to-hyperscript": { "version": "9.0.1", "resolved": "https://registry.npmjs.org/hast-to-hyperscript/-/hast-to-hyperscript-9.0.1.tgz", @@ -38980,6 +39210,19 @@ "node": ">=8" } }, + "node_modules/istanbul-lib-hook": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-hook/-/istanbul-lib-hook-3.0.0.tgz", + "integrity": "sha512-Pt/uge1Q9s+5VAZ+pCo16TYMWPBIl+oaNIjgLQxcX0itS6ueeaA+pEfThZpH8WxhFgCiEb8sAJY6MdUKgiIWaQ==", + "dev": true, + "peer": true, + "dependencies": { + "append-transform": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/istanbul-lib-instrument": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz", @@ -39004,6 +39247,128 @@ "semver": "bin/semver.js" } }, + "node_modules/istanbul-lib-processinfo": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/istanbul-lib-processinfo/-/istanbul-lib-processinfo-2.0.2.tgz", + "integrity": "sha512-kOwpa7z9hme+IBPZMzQ5vdQj8srYgAtaRqeI48NGmAQ+/5yKiHLV0QbYqQpxsdEF0+w14SoB8YbnHKcXE2KnYw==", + "dev": true, + "peer": true, + "dependencies": { + "archy": "^1.0.0", + "cross-spawn": "^7.0.0", + "istanbul-lib-coverage": "^3.0.0-alpha.1", + "make-dir": "^3.0.0", + "p-map": "^3.0.0", + "rimraf": "^3.0.0", + "uuid": "^3.3.3" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-processinfo/node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "peer": true, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/istanbul-lib-processinfo/node_modules/make-dir": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dev": true, + "peer": true, + "dependencies": { + "semver": "^6.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/istanbul-lib-processinfo/node_modules/p-map": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz", + "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==", + "dev": true, + "peer": true, + "dependencies": { + "aggregate-error": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + 
"node_modules/istanbul-lib-processinfo/node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "peer": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-processinfo/node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true, + "peer": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/istanbul-lib-processinfo/node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "peer": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-processinfo/node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "peer": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-processinfo/node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "peer": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, "node_modules/istanbul-lib-report": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", @@ -41655,6 +42020,18 @@ } } }, + "node_modules/jsdom/node_modules/acorn": { + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, "node_modules/jsdom/node_modules/escodegen": { "version": "1.14.3", "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.14.3.tgz", @@ -42130,30 +42507,23 @@ } }, "node_modules/less-loader": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/less-loader/-/less-loader-5.0.0.tgz", - "integrity": "sha512-bquCU89mO/yWLaUq0Clk7qCsKhsF/TZpJUzETRvJa9KSVEL9SO3ovCvdEHISBhrC81OwC8QSVX7E0bzElZj9cg==", + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/less-loader/-/less-loader-10.2.0.tgz", + "integrity": "sha512-AV5KHWvCezW27GT90WATaDnfXBv99llDbtaj4bshq6DvAihMdNjaPDcUMa6EXKLRF+P2opFenJp89BXg91XLYg==", "dev": true, "dependencies": { - "clone": "^2.1.1", - "loader-utils": "^1.1.0", - "pify": "^4.0.1" + "klona": "^2.0.4" }, "engines": { - "node": ">= 4.8.0" + "node": ">= 12.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" }, "peerDependencies": { - "less": "^2.3.1 || ^3.0.0", - "webpack": "^2.0.0 || ^3.0.0 || ^4.0.0" - } - }, - "node_modules/less-loader/node_modules/pify": { - "version": "4.0.1", - "resolved": 
"https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", - "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", - "dev": true, - "engines": { - "node": ">=6" + "less": "^3.5.0 || ^4.0.0", + "webpack": "^5.0.0" } }, "node_modules/less/node_modules/source-map": { @@ -43208,6 +43578,13 @@ "node": ">=6" } }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true, + "peer": true + }, "node_modules/make-fetch-happen": { "version": "8.0.14", "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-8.0.14.tgz", @@ -44449,9 +44826,9 @@ } }, "node_modules/moment": { - "version": "2.29.1", - "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.1.tgz", - "integrity": "sha512-kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ==", + "version": "2.29.2", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.2.tgz", + "integrity": "sha512-UgzG4rvxYpN15jgCmVJwac49h9ly9NurikMWGPdVxm8GZD6XjkKPxDTjQQ43gtGgnV3X0cAyWDdP2Wexoquifg==", "engines": { "node": "*" } @@ -45108,6 +45485,19 @@ "node": ">= 8" } }, + "node_modules/node-preload": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/node-preload/-/node-preload-0.2.1.tgz", + "integrity": "sha512-RM5oyBy45cLEoHqCeh+MNuFAxO0vTFBLskvQbOKnEE7YTTSN4tbN8QWDIPQ6L+WvKsB/qLEGpYe2ZZ9d4W9OIQ==", + "dev": true, + "peer": true, + "dependencies": { + "process-on-spawn": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/node-releases": { "version": "1.1.75", "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.75.tgz", @@ -45614,6 +46004,204 @@ "integrity": "sha512-h2AatdwYH+JHiZpv7pt/gSX1XoRGb7L/qSIeuqA6GwYoF9w1vP1cw42TO0aI2pNyshRK5893hNSl+1//vHK7hQ==", "dev": true }, + "node_modules/nyc": { + "version": "15.1.0", + "resolved": "https://registry.npmjs.org/nyc/-/nyc-15.1.0.tgz", + "integrity": "sha512-jMW04n9SxKdKi1ZMGhvUTHBN0EICCRkHemEoE5jm6mTYcqcdas0ATzgUgejlQUHMvpnOZqGB5Xxsv9KxJW1j8A==", + "dev": true, + "peer": true, + "dependencies": { + "@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.2", + "caching-transform": "^4.0.0", + "convert-source-map": "^1.7.0", + "decamelize": "^1.2.0", + "find-cache-dir": "^3.2.0", + "find-up": "^4.1.0", + "foreground-child": "^2.0.0", + "get-package-type": "^0.1.0", + "glob": "^7.1.6", + "istanbul-lib-coverage": "^3.0.0", + "istanbul-lib-hook": "^3.0.0", + "istanbul-lib-instrument": "^4.0.0", + "istanbul-lib-processinfo": "^2.0.2", + "istanbul-lib-report": "^3.0.0", + "istanbul-lib-source-maps": "^4.0.0", + "istanbul-reports": "^3.0.2", + "make-dir": "^3.0.0", + "node-preload": "^0.2.1", + "p-map": "^3.0.0", + "process-on-spawn": "^1.0.0", + "resolve-from": "^5.0.0", + "rimraf": "^3.0.0", + "signal-exit": "^3.0.2", + "spawn-wrap": "^2.0.0", + "test-exclude": "^6.0.0", + "yargs": "^15.0.2" + }, + "bin": { + "nyc": "bin/nyc.js" + }, + "engines": { + "node": ">=8.9" + } + }, + "node_modules/nyc/node_modules/find-cache-dir": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.2.tgz", + "integrity": "sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig==", + "dev": true, + "peer": true, + "dependencies": { + "commondir": "^1.0.1", + "make-dir": "^3.0.2", + 
"pkg-dir": "^4.1.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/avajs/find-cache-dir?sponsor=1" + } + }, + "node_modules/nyc/node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "peer": true, + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/nyc/node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "peer": true, + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/nyc/node_modules/make-dir": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dev": true, + "peer": true, + "dependencies": { + "semver": "^6.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/nyc/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "peer": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/nyc/node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "peer": true, + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/nyc/node_modules/p-map": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz", + "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==", + "dev": true, + "peer": true, + "dependencies": { + "aggregate-error": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/nyc/node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "peer": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/nyc/node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "peer": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/nyc/node_modules/pkg-dir": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", + "dev": true, + "peer": true, + "dependencies": { + 
"find-up": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/nyc/node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "peer": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/nyc/node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true, + "peer": true, + "bin": { + "semver": "bin/semver.js" + } + }, "node_modules/oauth-sign": { "version": "0.9.0", "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", @@ -46262,6 +46850,22 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/package-hash": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/package-hash/-/package-hash-4.0.0.tgz", + "integrity": "sha512-whdkPIooSu/bASggZ96BWVvZTRMOFxnyUG5PnTSGKoJE2gd5mbVNmR2Nj20QFzxYYgAXpoqC+AiXzl+UMRh7zQ==", + "dev": true, + "peer": true, + "dependencies": { + "graceful-fs": "^4.1.15", + "hasha": "^5.0.0", + "lodash.flattendeep": "^4.4.0", + "release-zalgo": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/pacote": { "version": "12.0.2", "resolved": "https://registry.npmjs.org/pacote/-/pacote-12.0.2.tgz", @@ -47331,6 +47935,19 @@ "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz", "integrity": "sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw==" }, + "node_modules/process-on-spawn": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/process-on-spawn/-/process-on-spawn-1.0.0.tgz", + "integrity": "sha512-1WsPDsUSMmZH5LeMLegqkPDrsGgsWwk1Exipy2hvB0o/F0ASzbpIctSCcZIK1ykJvtTJULEH+20WOFjMvGnCTg==", + "dev": true, + "peer": true, + "dependencies": { + "fromentries": "^1.2.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/progress": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", @@ -48786,9 +49403,9 @@ } }, "node_modules/react-hot-loader": { - "version": "4.12.20", - "resolved": "https://registry.npmjs.org/react-hot-loader/-/react-hot-loader-4.12.20.tgz", - "integrity": "sha512-lPlv1HVizi0lsi+UFACBJaydtRYILWkfHAC/lyCs6ZlAxlOZRQIfYHDqiGaRvL/GF7zyti+Qn9XpnDAUvdFA4A==", + "version": "4.13.0", + "resolved": "https://registry.npmjs.org/react-hot-loader/-/react-hot-loader-4.13.0.tgz", + "integrity": "sha512-JrLlvUPqh6wIkrK2hZDfOyq/Uh/WeVEr8nc7hkn2/3Ul0sx1Kr5y4kOGNacNRoj7RhwLNcQ3Udf1KJXrqc0ZtA==", "dependencies": { "fast-levenshtein": "^2.0.6", "global": "^4.3.0", @@ -48803,9 +49420,14 @@ "node": ">= 6" }, "peerDependencies": { - "@types/react": "^15.0.0 || ^16.0.0", - "react": "^15.0.0 || ^16.0.0", - "react-dom": "^15.0.0 || ^16.0.0" + "@types/react": "^15.0.0 || ^16.0.0 || ^17.0.0 ", + "react": "^15.0.0 || ^16.0.0 || ^17.0.0 ", + "react-dom": "^15.0.0 || ^16.0.0 || ^17.0.0 " + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } } }, "node_modules/react-hot-loader/node_modules/source-map": { @@ -50203,6 +50825,19 @@ "node": ">= 0.10" } }, + "node_modules/release-zalgo": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/release-zalgo/-/release-zalgo-1.0.0.tgz", + "integrity": "sha1-CXALflB0Mpc5Mw5TXFqQ+2eFFzA=", + 
"dev": true, + "peer": true, + "dependencies": { + "es6-error": "^4.0.1" + }, + "engines": { + "node": ">=4" + } + }, "node_modules/remark-external-links": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/remark-external-links/-/remark-external-links-8.0.0.tgz", @@ -52053,6 +52688,66 @@ "trim": "0.0.1" } }, + "node_modules/spawn-wrap": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/spawn-wrap/-/spawn-wrap-2.0.0.tgz", + "integrity": "sha512-EeajNjfN9zMnULLwhZZQU3GWBoFNkbngTUPfaawT4RkMiviTxcX0qfhVbGey39mfctfDHkWtuecgQ8NJcyQWHg==", + "dev": true, + "peer": true, + "dependencies": { + "foreground-child": "^2.0.0", + "is-windows": "^1.0.2", + "make-dir": "^3.0.0", + "rimraf": "^3.0.0", + "signal-exit": "^3.0.2", + "which": "^2.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/spawn-wrap/node_modules/make-dir": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dev": true, + "peer": true, + "dependencies": { + "semver": "^6.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/spawn-wrap/node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true, + "peer": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/spawn-wrap/node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "peer": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, "node_modules/spdx-correct": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.0.tgz", @@ -53967,6 +54662,60 @@ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", "dev": true }, + "node_modules/ts-node": { + "version": "10.7.0", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.7.0.tgz", + "integrity": "sha512-TbIGS4xgJoX2i3do417KSaep1uRAW/Lu+WAL2doDHC0D6ummjirVOXU5/7aiZotbQ5p1Zp9tP7U6cYhA0O7M8A==", + "dev": true, + "peer": true, + "dependencies": { + "@cspotcode/source-map-support": "0.7.0", + "@tsconfig/node10": "^1.0.7", + "@tsconfig/node12": "^1.0.7", + "@tsconfig/node14": "^1.0.0", + "@tsconfig/node16": "^1.0.2", + "acorn": "^8.4.1", + "acorn-walk": "^8.1.1", + "arg": "^4.1.0", + "create-require": "^1.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "v8-compile-cache-lib": "^3.0.0", + "yn": "3.1.1" + }, + "bin": { + "ts-node": "dist/bin.js", + "ts-node-cwd": "dist/bin-cwd.js", + "ts-node-esm": "dist/bin-esm.js", + "ts-node-script": "dist/bin-script.js", + "ts-node-transpile-only": "dist/bin-transpile.js", + "ts-script": "dist/bin-script-deprecated.js" + }, + "peerDependencies": { + "@swc/core": ">=1.2.50", + "@swc/wasm": ">=1.2.50", + "@types/node": "*", + "typescript": ">=2.7" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "@swc/wasm": { + "optional": true + } + } + }, + "node_modules/ts-node/node_modules/acorn-walk": { + "version": "8.2.0", + 
"resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz", + "integrity": "sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==", + "dev": true, + "peer": true, + "engines": { + "node": ">=0.4.0" + } + }, "node_modules/ts-pnp": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/ts-pnp/-/ts-pnp-1.2.0.tgz", @@ -54518,9 +55267,9 @@ } }, "node_modules/url-parse": { - "version": "1.5.7", - "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.7.tgz", - "integrity": "sha512-HxWkieX+STA38EDk7CE9MEryFeHCKzgagxlGvsdS7WBImq9Mk+PGwiT56w82WI3aicwJA8REp42Cxo98c8FZMA==", + "version": "1.5.10", + "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz", + "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==", "dev": true, "dependencies": { "querystringify": "^2.1.1", @@ -54676,6 +55425,13 @@ "integrity": "sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==", "dev": true }, + "node_modules/v8-compile-cache-lib": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.0.tgz", + "integrity": "sha512-mpSYqfsFvASnSn5qMiwrr4VKfumbPyONLCOPmsR3A6pTY/r0+tSaVbgPWSAIuzbk3lCTa+FForeTiO+wBQGkjA==", + "dev": true, + "peer": true + }, "node_modules/v8-to-istanbul": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-7.0.0.tgz", @@ -55105,18 +55861,6 @@ "node": ">= 10.13.0" } }, - "node_modules/webpack-bundle-analyzer/node_modules/acorn": { - "version": "8.5.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.5.0.tgz", - "integrity": "sha512-yXbYeFy+jUuYd3/CDcg2NkIYE991XYX/bje7LmjJigUciaeO1JR4XxXgCIV1/Zc/dRuFEyw1L0pbA+qynJkW5Q==", - "dev": true, - "bin": { - "acorn": "bin/acorn" - }, - "engines": { - "node": ">=0.4.0" - } - }, "node_modules/webpack-bundle-analyzer/node_modules/acorn-walk": { "version": "8.2.0", "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz", @@ -55767,17 +56511,6 @@ "@xtuc/long": "4.2.2" } }, - "node_modules/webpack/node_modules/acorn": { - "version": "8.5.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.5.0.tgz", - "integrity": "sha512-yXbYeFy+jUuYd3/CDcg2NkIYE991XYX/bje7LmjJigUciaeO1JR4XxXgCIV1/Zc/dRuFEyw1L0pbA+qynJkW5Q==", - "bin": { - "acorn": "bin/acorn" - }, - "engines": { - "node": ">=0.4.0" - } - }, "node_modules/webpack/node_modules/enhanced-resolve": { "version": "5.8.2", "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.8.2.tgz", @@ -58301,6 +59034,16 @@ "integrity": "sha512-hcVC3wYEziELGGmEEXue7D75zbwIIVUMWAVbHItGPx0ziyXxrOMQx4rQEVEV45Ut/1IotuEvwqPopzIOkDMf0A==", "dev": true }, + "node_modules/yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": true, + "peer": true, + "engines": { + "node": ">=6" + } + }, "node_modules/yocto-queue": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", @@ -58498,7 +59241,7 @@ "@types/d3-format": "^1.3.0", "@types/d3-interpolate": "^1.3.1", "@types/d3-scale": "^2.1.1", - "@types/d3-time": "^1.0.9", + "@types/d3-time": "^3.0.0", "@types/d3-time-format": "^2.1.0", "@types/enzyme": "^3.10.5", "@types/fetch-mock": "^7.3.3", @@ -58544,6 +59287,11 @@ "tinycolor2": "*" } }, + 
"packages/superset-ui-core/node_modules/@types/d3-time": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.0.tgz", + "integrity": "sha512-sZLCdHvBUcNby1cB6Fd3ZBrABbjz3v1Vm90nysCQ6Vt7vd6e/h9Lt7SiJUoEX0l4Dzc7P5llKyhqSi1ycSf1Hg==" + }, "packages/superset-ui-core/node_modules/@vx/responsive": { "version": "0.0.199", "resolved": "https://registry.npmjs.org/@vx/responsive/-/responsive-0.0.199.tgz", @@ -59098,6 +59846,7 @@ "plugins/legacy-plugin-chart-force-directed": { "name": "@superset-ui/legacy-plugin-chart-force-directed", "version": "0.0.1", + "extraneous": true, "dependencies": { "d3": "^3.5.17", "prop-types": "^15.7.2" @@ -59785,11 +60534,7 @@ "tools/eslint-plugin-theme-colors": { "version": "1.0.0", "dev": true, - "license": "Apache-2.0", - "engines": { - "node": "^16.9.1", - "npm": "^7.5.4" - } + "license": "Apache-2.0" } }, "dependencies": { @@ -61465,6 +62210,23 @@ "minimist": "^1.2.0" } }, + "@cspotcode/source-map-consumer": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-consumer/-/source-map-consumer-0.8.0.tgz", + "integrity": "sha512-41qniHzTU8yAGbCp04ohlmSrZf8bkf/iJsl3V0dRGsQN/5GFfx+LbCSsCpp2gqrqjTVg/K6O8ycoV35JIwAzAg==", + "dev": true, + "peer": true + }, + "@cspotcode/source-map-support": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.7.0.tgz", + "integrity": "sha512-X4xqRHqN8ACt2aHVe51OxeA2HjbcL4MqFqXkrmQszJ1NOUuUu5u6Vqx/0lZSVNku7velL5FC/s5uEAj1lsBMhA==", + "dev": true, + "peer": true, + "requires": { + "@cspotcode/source-map-consumer": "0.8.0" + } + }, "@ctrl/tinycolor": { "version": "3.3.1", "resolved": "https://registry.npmjs.org/@ctrl/tinycolor/-/tinycolor-3.3.1.tgz", @@ -76192,7 +76954,7 @@ "@types/d3-format": "^1.3.0", "@types/d3-interpolate": "^1.3.1", "@types/d3-scale": "^2.1.1", - "@types/d3-time": "^1.0.9", + "@types/d3-time": "^3.0.0", "@types/d3-time-format": "^2.1.0", "@types/enzyme": "^3.10.5", "@types/fetch-mock": "^7.3.3", @@ -76224,6 +76986,11 @@ "whatwg-fetch": "^3.0.0" }, "dependencies": { + "@types/d3-time": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.0.tgz", + "integrity": "sha512-sZLCdHvBUcNby1cB6Fd3ZBrABbjz3v1Vm90nysCQ6Vt7vd6e/h9Lt7SiJUoEX0l4Dzc7P5llKyhqSi1ycSf1Hg==" + }, "@vx/responsive": { "version": "0.0.199", "resolved": "https://registry.npmjs.org/@vx/responsive/-/responsive-0.0.199.tgz", @@ -76641,13 +77408,6 @@ "prop-types": "^15.6.2" } }, - "@superset-ui/legacy-plugin-chart-force-directed": { - "version": "file:plugins/legacy-plugin-chart-force-directed", - "requires": { - "d3": "^3.5.17", - "prop-types": "^15.7.2" - } - }, "@superset-ui/legacy-plugin-chart-heatmap": { "version": "file:plugins/legacy-plugin-chart-heatmap", "requires": { @@ -77378,6 +78138,34 @@ "integrity": "sha512-Z6DoceYb/1xSg5+e+ZlPZ9v0N16ZvZ+wYMraFue4HYrE4ttONKtsvruIRf6t9TBR0YvSOfi1hUU0fJfBLCDYow==", "dev": true }, + "@tsconfig/node10": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.8.tgz", + "integrity": "sha512-6XFfSQmMgq0CFLY1MslA/CPUfhIL919M1rMsa5lP2P097N2Wd1sSX0tx1u4olM16fLNhtHZpRhedZJphNJqmZg==", + "dev": true, + "peer": true + }, + "@tsconfig/node12": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.9.tgz", + "integrity": "sha512-/yBMcem+fbvhSREH+s14YJi18sp7J9jpuhYByADT2rypfajMZZN4WQ6zBGgBKp53NKmqI36wFYDb3yaMPurITw==", + "dev": true, + "peer": true + }, + 
"@tsconfig/node14": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.1.tgz", + "integrity": "sha512-509r2+yARFfHHE7T6Puu2jjkoycftovhXRqW328PDXTVGKihlb1P8Z9mMZH04ebyajfRY7dedfGynlrFHJUQCg==", + "dev": true, + "peer": true + }, + "@tsconfig/node16": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.2.tgz", + "integrity": "sha512-eZxlbI8GZscaGS7kkc/trHTT5xgrjH3/1n2JDwusC9iahPKWMRvRjJSAN5mCXviuTGQ/lHnhvv8Q1YTpnfz9gA==", + "dev": true, + "peer": true + }, "@types/aria-query": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/@types/aria-query/-/aria-query-4.2.0.tgz", @@ -79099,9 +79887,9 @@ "integrity": "sha512-NBOQlm9+7RBqRqZwimpgquaLeTJFayqb9UEPtTkpC3TkkwDnlsT/TwsCC0svjt9kEZ6G9mH5AEOHSz6Q/HrzQQ==" }, "acorn": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.1.tgz", - "integrity": "sha512-add7dgA5ppRPxCFJoAGfMDi7PIBXq1RtGo7BhbLaxwrXPOmw8gq48Y9ozT01hUKy9byMjlR20EJhu5zlkErEkg==" + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.0.tgz", + "integrity": "sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ==" }, "acorn-globals": { "version": "6.0.0", @@ -79111,6 +79899,14 @@ "requires": { "acorn": "^7.1.1", "acorn-walk": "^7.1.1" + }, + "dependencies": { + "acorn": { + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", + "dev": true + } } }, "acorn-import-assertions": { @@ -79573,6 +80369,16 @@ "resolved": "https://registry.npmjs.org/app-root-dir/-/app-root-dir-1.0.2.tgz", "integrity": "sha1-OBh+wt6nV3//Az/8sSFyaS/24Rg=" }, + "append-transform": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/append-transform/-/append-transform-2.0.0.tgz", + "integrity": "sha512-7yeyCEurROLQJFv5Xj4lEGTy0borxepjFv1g22oAdqFu//SrAlDl1O1Nxx15SH1RoliUml6p8dwJW9jvZughhg==", + "dev": true, + "peer": true, + "requires": { + "default-require-extensions": "^3.0.0" + } + }, "aproba": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", @@ -79585,6 +80391,13 @@ "dev": true, "peer": true }, + "archy": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/archy/-/archy-1.0.0.tgz", + "integrity": "sha1-+cjBN1fMHde8N5rHeyxipcKGjEA=", + "dev": true, + "peer": true + }, "are-we-there-yet": { "version": "1.1.7", "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-1.1.7.tgz", @@ -79595,6 +80408,13 @@ "readable-stream": "^2.0.6" } }, + "arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + "dev": true, + "peer": true + }, "argparse": { "version": "1.0.10", "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", @@ -79974,12 +80794,12 @@ } }, "babel-loader": { - "version": "8.2.2", - "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-8.2.2.tgz", - "integrity": "sha512-JvTd0/D889PQBtUXJ2PXaKU/pjZDMtHA9V2ecm+eNRmmBCMR09a+fmpGTNwnJtFmFl5Ei7Vy47LjBb+L0wQ99g==", + "version": "8.2.5", + "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-8.2.5.tgz", + "integrity": "sha512-OSiFfH89LrEMiWd4pLNqGz4CwJDtbs2ZVc+iGu2HrkRfPxId9F2anQj38IxWpmRfsUY0aBZYi1EFcd3mhtRMLQ==", "requires": { "find-cache-dir": 
"^3.3.1", - "loader-utils": "^1.4.0", + "loader-utils": "^2.0.0", "make-dir": "^3.1.0", "schema-utils": "^2.6.5" }, @@ -80003,6 +80823,21 @@ "path-exists": "^4.0.0" } }, + "json5": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.1.tgz", + "integrity": "sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==" + }, + "loader-utils": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.2.tgz", + "integrity": "sha512-TM57VeHptv569d/GKh6TAYdzKblwDNiumOdkFnejjD0XwTH87K90w3O7AiJRqdQoXygvi1VQTJTLGhJl7WqA7A==", + "requires": { + "big.js": "^5.2.2", + "emojis-list": "^3.0.0", + "json5": "^2.1.2" + } + }, "locate-path": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", @@ -81215,6 +82050,38 @@ "dev": true, "peer": true }, + "caching-transform": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/caching-transform/-/caching-transform-4.0.0.tgz", + "integrity": "sha512-kpqOvwXnjjN44D89K5ccQC+RUrsy7jB/XLlRrx0D7/2HNcTPqzsb6XgYoErwko6QsV184CA2YgS1fxDiiDZMWA==", + "dev": true, + "peer": true, + "requires": { + "hasha": "^5.0.0", + "make-dir": "^3.0.0", + "package-hash": "^4.0.0", + "write-file-atomic": "^3.0.0" + }, + "dependencies": { + "make-dir": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dev": true, + "peer": true, + "requires": { + "semver": "^6.0.0" + } + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true, + "peer": true + } + } + }, "call-bind": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", @@ -83137,6 +84004,13 @@ "warning": "^4.0.3" } }, + "create-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", + "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", + "dev": true, + "peer": true + }, "cross-env": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/cross-env/-/cross-env-5.2.0.tgz", @@ -84682,6 +85556,25 @@ } } }, + "default-require-extensions": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/default-require-extensions/-/default-require-extensions-3.0.0.tgz", + "integrity": "sha512-ek6DpXq/SCpvjhpFsLFRVtIxJCRw6fUR42lYMVZuUMK7n8eMz4Uh5clckdBjEpLhn/gEBZo7hDJnJcwdKLKQjg==", + "dev": true, + "peer": true, + "requires": { + "strip-bom": "^4.0.0" + }, + "dependencies": { + "strip-bom": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", + "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", + "dev": true, + "peer": true + } + } + }, "defaults": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/defaults/-/defaults-1.0.3.tgz", @@ -85765,6 +86658,13 @@ "resolved": "https://registry.npmjs.org/es5-shim/-/es5-shim-4.6.5.tgz", "integrity": "sha512-vfQ4UAai8szn0sAubCy97xnZ4sJVDD1gt/Grn736hg8D7540wemIb1YPrYZSTqlM2H69EQX1or4HU/tSwRTI3w==" }, + "es6-error": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/es6-error/-/es6-error-4.1.1.tgz", + 
"integrity": "sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==", + "dev": true, + "peer": true + }, "es6-shim": { "version": "0.35.6", "resolved": "https://registry.npmjs.org/es6-shim/-/es6-shim-0.35.6.tgz", @@ -86972,6 +87872,11 @@ "object-keys": "^1.0.6" }, "dependencies": { + "acorn": { + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==" + }, "isarray": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", @@ -88824,6 +89729,26 @@ "minimalistic-assert": "^1.0.1" } }, + "hasha": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/hasha/-/hasha-5.2.2.tgz", + "integrity": "sha512-Hrp5vIK/xr5SkeN2onO32H0MgNZ0f17HRNH39WfL0SYUNOTZ5Lz1TJ8Pajo/87dYGEFlLMm7mIc/k/s6Bvz9HQ==", + "dev": true, + "peer": true, + "requires": { + "is-stream": "^2.0.0", + "type-fest": "^0.8.0" + }, + "dependencies": { + "is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "peer": true + } + } + }, "hast-to-hyperscript": { "version": "9.0.1", "resolved": "https://registry.npmjs.org/hast-to-hyperscript/-/hast-to-hyperscript-9.0.1.tgz", @@ -90359,6 +91284,16 @@ "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.0.0.tgz", "integrity": "sha512-UiUIqxMgRDET6eR+o5HbfRYP1l0hqkWOs7vNxC/mggutCMUIhWMm8gAHb8tHlyfD3/l6rlgNA5cKdDzEAf6hEg==" }, + "istanbul-lib-hook": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-hook/-/istanbul-lib-hook-3.0.0.tgz", + "integrity": "sha512-Pt/uge1Q9s+5VAZ+pCo16TYMWPBIl+oaNIjgLQxcX0itS6ueeaA+pEfThZpH8WxhFgCiEb8sAJY6MdUKgiIWaQ==", + "dev": true, + "peer": true, + "requires": { + "append-transform": "^2.0.0" + } + }, "istanbul-lib-instrument": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz", @@ -90379,6 +91314,97 @@ } } }, + "istanbul-lib-processinfo": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/istanbul-lib-processinfo/-/istanbul-lib-processinfo-2.0.2.tgz", + "integrity": "sha512-kOwpa7z9hme+IBPZMzQ5vdQj8srYgAtaRqeI48NGmAQ+/5yKiHLV0QbYqQpxsdEF0+w14SoB8YbnHKcXE2KnYw==", + "dev": true, + "peer": true, + "requires": { + "archy": "^1.0.0", + "cross-spawn": "^7.0.0", + "istanbul-lib-coverage": "^3.0.0-alpha.1", + "make-dir": "^3.0.0", + "p-map": "^3.0.0", + "rimraf": "^3.0.0", + "uuid": "^3.3.3" + }, + "dependencies": { + "cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "peer": true, + "requires": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + } + }, + "make-dir": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dev": true, + "peer": true, + "requires": { + "semver": "^6.0.0" + } + }, + "p-map": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz", + "integrity": 
"sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==", + "dev": true, + "peer": true, + "requires": { + "aggregate-error": "^3.0.0" + } + }, + "path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "peer": true + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true, + "peer": true + }, + "shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "peer": true, + "requires": { + "shebang-regex": "^3.0.0" + } + }, + "shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "peer": true + }, + "which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "peer": true, + "requires": { + "isexe": "^2.0.0" + } + } + } + }, "istanbul-lib-report": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", @@ -92445,6 +93471,12 @@ "xml-name-validator": "^3.0.0" }, "dependencies": { + "acorn": { + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", + "dev": true + }, "escodegen": { "version": "1.14.3", "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.14.3.tgz", @@ -92823,22 +93855,12 @@ } }, "less-loader": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/less-loader/-/less-loader-5.0.0.tgz", - "integrity": "sha512-bquCU89mO/yWLaUq0Clk7qCsKhsF/TZpJUzETRvJa9KSVEL9SO3ovCvdEHISBhrC81OwC8QSVX7E0bzElZj9cg==", + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/less-loader/-/less-loader-10.2.0.tgz", + "integrity": "sha512-AV5KHWvCezW27GT90WATaDnfXBv99llDbtaj4bshq6DvAihMdNjaPDcUMa6EXKLRF+P2opFenJp89BXg91XLYg==", "dev": true, "requires": { - "clone": "^2.1.1", - "loader-utils": "^1.1.0", - "pify": "^4.0.1" - }, - "dependencies": { - "pify": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", - "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", - "dev": true - } + "klona": "^2.0.4" } }, "leven": { @@ -93689,6 +94711,13 @@ } } }, + "make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true, + "peer": true + }, "make-fetch-happen": { "version": "8.0.14", "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-8.0.14.tgz", @@ -94672,9 +95701,9 @@ "dev": true }, "moment": { - "version": "2.29.1", - 
"resolved": "https://registry.npmjs.org/moment/-/moment-2.29.1.tgz", - "integrity": "sha512-kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ==" + "version": "2.29.2", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.2.tgz", + "integrity": "sha512-UgzG4rvxYpN15jgCmVJwac49h9ly9NurikMWGPdVxm8GZD6XjkKPxDTjQQ43gtGgnV3X0cAyWDdP2Wexoquifg==" }, "moment-timezone": { "version": "0.5.33", @@ -95227,6 +96256,16 @@ } } }, + "node-preload": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/node-preload/-/node-preload-0.2.1.tgz", + "integrity": "sha512-RM5oyBy45cLEoHqCeh+MNuFAxO0vTFBLskvQbOKnEE7YTTSN4tbN8QWDIPQ6L+WvKsB/qLEGpYe2ZZ9d4W9OIQ==", + "dev": true, + "peer": true, + "requires": { + "process-on-spawn": "^1.0.0" + } + }, "node-releases": { "version": "1.1.75", "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.75.tgz", @@ -95632,6 +96671,155 @@ "integrity": "sha512-h2AatdwYH+JHiZpv7pt/gSX1XoRGb7L/qSIeuqA6GwYoF9w1vP1cw42TO0aI2pNyshRK5893hNSl+1//vHK7hQ==", "dev": true }, + "nyc": { + "version": "15.1.0", + "resolved": "https://registry.npmjs.org/nyc/-/nyc-15.1.0.tgz", + "integrity": "sha512-jMW04n9SxKdKi1ZMGhvUTHBN0EICCRkHemEoE5jm6mTYcqcdas0ATzgUgejlQUHMvpnOZqGB5Xxsv9KxJW1j8A==", + "dev": true, + "peer": true, + "requires": { + "@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.2", + "caching-transform": "^4.0.0", + "convert-source-map": "^1.7.0", + "decamelize": "^1.2.0", + "find-cache-dir": "^3.2.0", + "find-up": "^4.1.0", + "foreground-child": "^2.0.0", + "get-package-type": "^0.1.0", + "glob": "^7.1.6", + "istanbul-lib-coverage": "^3.0.0", + "istanbul-lib-hook": "^3.0.0", + "istanbul-lib-instrument": "^4.0.0", + "istanbul-lib-processinfo": "^2.0.2", + "istanbul-lib-report": "^3.0.0", + "istanbul-lib-source-maps": "^4.0.0", + "istanbul-reports": "^3.0.2", + "make-dir": "^3.0.0", + "node-preload": "^0.2.1", + "p-map": "^3.0.0", + "process-on-spawn": "^1.0.0", + "resolve-from": "^5.0.0", + "rimraf": "^3.0.0", + "signal-exit": "^3.0.2", + "spawn-wrap": "^2.0.0", + "test-exclude": "^6.0.0", + "yargs": "^15.0.2" + }, + "dependencies": { + "find-cache-dir": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.2.tgz", + "integrity": "sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig==", + "dev": true, + "peer": true, + "requires": { + "commondir": "^1.0.1", + "make-dir": "^3.0.2", + "pkg-dir": "^4.1.0" + } + }, + "find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "peer": true, + "requires": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + } + }, + "locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "peer": true, + "requires": { + "p-locate": "^4.1.0" + } + }, + "make-dir": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dev": true, + "peer": true, + "requires": { + "semver": "^6.0.0" + } + }, + "p-limit": { + "version": "2.3.0", + "resolved": 
"https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "peer": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "peer": true, + "requires": { + "p-limit": "^2.2.0" + } + }, + "p-map": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz", + "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==", + "dev": true, + "peer": true, + "requires": { + "aggregate-error": "^3.0.0" + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "peer": true + }, + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "peer": true + }, + "pkg-dir": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", + "dev": true, + "peer": true, + "requires": { + "find-up": "^4.0.0" + } + }, + "resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "peer": true + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true, + "peer": true + } + } + }, "oauth-sign": { "version": "0.9.0", "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", @@ -96109,6 +97297,19 @@ "p-reduce": "^2.0.0" } }, + "package-hash": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/package-hash/-/package-hash-4.0.0.tgz", + "integrity": "sha512-whdkPIooSu/bASggZ96BWVvZTRMOFxnyUG5PnTSGKoJE2gd5mbVNmR2Nj20QFzxYYgAXpoqC+AiXzl+UMRh7zQ==", + "dev": true, + "peer": true, + "requires": { + "graceful-fs": "^4.1.15", + "hasha": "^5.0.0", + "lodash.flattendeep": "^4.4.0", + "release-zalgo": "^1.0.0" + } + }, "pacote": { "version": "12.0.2", "resolved": "https://registry.npmjs.org/pacote/-/pacote-12.0.2.tgz", @@ -96938,6 +98139,16 @@ "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz", "integrity": "sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw==" }, + "process-on-spawn": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/process-on-spawn/-/process-on-spawn-1.0.0.tgz", + "integrity": "sha512-1WsPDsUSMmZH5LeMLegqkPDrsGgsWwk1Exipy2hvB0o/F0ASzbpIctSCcZIK1ykJvtTJULEH+20WOFjMvGnCTg==", + "dev": true, + "peer": true, + "requires": { + "fromentries": "^1.2.0" + } + }, "progress": { "version": "2.0.3", "resolved": 
"https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", @@ -98100,9 +99311,9 @@ } }, "react-hot-loader": { - "version": "4.12.20", - "resolved": "https://registry.npmjs.org/react-hot-loader/-/react-hot-loader-4.12.20.tgz", - "integrity": "sha512-lPlv1HVizi0lsi+UFACBJaydtRYILWkfHAC/lyCs6ZlAxlOZRQIfYHDqiGaRvL/GF7zyti+Qn9XpnDAUvdFA4A==", + "version": "4.13.0", + "resolved": "https://registry.npmjs.org/react-hot-loader/-/react-hot-loader-4.13.0.tgz", + "integrity": "sha512-JrLlvUPqh6wIkrK2hZDfOyq/Uh/WeVEr8nc7hkn2/3Ul0sx1Kr5y4kOGNacNRoj7RhwLNcQ3Udf1KJXrqc0ZtA==", "requires": { "fast-levenshtein": "^2.0.6", "global": "^4.3.0", @@ -99240,6 +100451,16 @@ "resolved": "https://registry.npmjs.org/relateurl/-/relateurl-0.2.7.tgz", "integrity": "sha1-VNvzd+UUQKypCkzSdGANP/LYiKk=" }, + "release-zalgo": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/release-zalgo/-/release-zalgo-1.0.0.tgz", + "integrity": "sha1-CXALflB0Mpc5Mw5TXFqQ+2eFFzA=", + "dev": true, + "peer": true, + "requires": { + "es6-error": "^4.0.1" + } + }, "remark-external-links": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/remark-external-links/-/remark-external-links-8.0.0.tgz", @@ -100683,6 +101904,50 @@ "trim": "0.0.1" } }, + "spawn-wrap": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/spawn-wrap/-/spawn-wrap-2.0.0.tgz", + "integrity": "sha512-EeajNjfN9zMnULLwhZZQU3GWBoFNkbngTUPfaawT4RkMiviTxcX0qfhVbGey39mfctfDHkWtuecgQ8NJcyQWHg==", + "dev": true, + "peer": true, + "requires": { + "foreground-child": "^2.0.0", + "is-windows": "^1.0.2", + "make-dir": "^3.0.0", + "rimraf": "^3.0.0", + "signal-exit": "^3.0.2", + "which": "^2.0.1" + }, + "dependencies": { + "make-dir": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dev": true, + "peer": true, + "requires": { + "semver": "^6.0.0" + } + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true, + "peer": true + }, + "which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "peer": true, + "requires": { + "isexe": "^2.0.0" + } + } + } + }, "spdx-correct": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.0.tgz", @@ -102176,6 +103441,37 @@ } } }, + "ts-node": { + "version": "10.7.0", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.7.0.tgz", + "integrity": "sha512-TbIGS4xgJoX2i3do417KSaep1uRAW/Lu+WAL2doDHC0D6ummjirVOXU5/7aiZotbQ5p1Zp9tP7U6cYhA0O7M8A==", + "dev": true, + "peer": true, + "requires": { + "@cspotcode/source-map-support": "0.7.0", + "@tsconfig/node10": "^1.0.7", + "@tsconfig/node12": "^1.0.7", + "@tsconfig/node14": "^1.0.0", + "@tsconfig/node16": "^1.0.2", + "acorn": "^8.4.1", + "acorn-walk": "^8.1.1", + "arg": "^4.1.0", + "create-require": "^1.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "v8-compile-cache-lib": "^3.0.0", + "yn": "3.1.1" + }, + "dependencies": { + "acorn-walk": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz", + "integrity": 
"sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==", + "dev": true, + "peer": true + } + } + }, "ts-pnp": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/ts-pnp/-/ts-pnp-1.2.0.tgz", @@ -102603,9 +103899,9 @@ } }, "url-parse": { - "version": "1.5.7", - "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.7.tgz", - "integrity": "sha512-HxWkieX+STA38EDk7CE9MEryFeHCKzgagxlGvsdS7WBImq9Mk+PGwiT56w82WI3aicwJA8REp42Cxo98c8FZMA==", + "version": "1.5.10", + "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz", + "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==", "dev": true, "requires": { "querystringify": "^2.1.1", @@ -102718,6 +104014,13 @@ "integrity": "sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==", "dev": true }, + "v8-compile-cache-lib": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.0.tgz", + "integrity": "sha512-mpSYqfsFvASnSn5qMiwrr4VKfumbPyONLCOPmsR3A6pTY/r0+tSaVbgPWSAIuzbk3lCTa+FForeTiO+wBQGkjA==", + "dev": true, + "peer": true + }, "v8-to-istanbul": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-7.0.0.tgz", @@ -103175,11 +104478,6 @@ "@xtuc/long": "4.2.2" } }, - "acorn": { - "version": "8.5.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.5.0.tgz", - "integrity": "sha512-yXbYeFy+jUuYd3/CDcg2NkIYE991XYX/bje7LmjJigUciaeO1JR4XxXgCIV1/Zc/dRuFEyw1L0pbA+qynJkW5Q==" - }, "enhanced-resolve": { "version": "5.8.2", "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.8.2.tgz", @@ -103296,12 +104594,6 @@ "ws": "^7.3.1" }, "dependencies": { - "acorn": { - "version": "8.5.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.5.0.tgz", - "integrity": "sha512-yXbYeFy+jUuYd3/CDcg2NkIYE991XYX/bje7LmjJigUciaeO1JR4XxXgCIV1/Zc/dRuFEyw1L0pbA+qynJkW5Q==", - "dev": true - }, "acorn-walk": { "version": "8.2.0", "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz", @@ -105484,6 +106776,13 @@ } } }, + "yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": true, + "peer": true + }, "yocto-queue": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", diff --git a/superset-frontend/package.json b/superset-frontend/package.json index 4b4146665f6be..c477a1d6e3e15 100644 --- a/superset-frontend/package.json +++ b/superset-frontend/package.json @@ -305,7 +305,7 @@ "jsdom": "^16.4.0", "lerna": "^4.0.0", "less": "^3.12.2", - "less-loader": "^5.0.0", + "less-loader": "^10.2.0", "mini-css-extract-plugin": "^2.3.0", "mock-socket": "^9.0.3", "node-fetch": "^2.6.1", diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/operators/flattenOperator.ts b/superset-frontend/packages/superset-ui-chart-controls/src/operators/flattenOperator.ts index 1670a84170249..2fe732fc83d06 100644 --- a/superset-frontend/packages/superset-ui-chart-controls/src/operators/flattenOperator.ts +++ b/superset-frontend/packages/superset-ui-chart-controls/src/operators/flattenOperator.ts @@ -1,4 +1,3 @@ -/* eslint-disable camelcase */ /** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. 
See the NOTICE file
@@ -17,21 +16,12 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-import { ensureIsArray, PostProcessingFlatten } from '@superset-ui/core';
+import { PostProcessingFlatten } from '@superset-ui/core';
 import { PostProcessingFactory } from './types';
 
 export const flattenOperator: PostProcessingFactory<PostProcessingFlatten> = (
   formData,
   queryObject,
-) => {
-  const drop_levels: number[] = [];
-  if (ensureIsArray(queryObject.metrics).length === 1) {
-    drop_levels.push(0);
-  }
-  return {
-    operation: 'flatten',
-    options: {
-      drop_levels,
-    },
-  };
-};
+) => ({
+  operation: 'flatten',
+});
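For illustration: the hunk above removes the frontend-computed `drop_levels` option, so `flattenOperator` now always emits a bare flatten rule; the metric-name level appears to be handled instead by the new `renameOperator` added below. A minimal sketch of the new return value, assuming `flattenOperator` is re-exported from `@superset-ui/chart-controls` like its sibling operators (the inputs are illustrative and not part of this diff):

```ts
import { flattenOperator } from '@superset-ui/chart-controls';
import { QueryObject, SqlaFormData } from '@superset-ui/core';

// Illustrative single-metric inputs; the operator now returns the same rule
// regardless of how many metrics the query contains.
const formData = { datasource: 'foo', viz_type: 'table' } as SqlaFormData;
const queryObject = { metrics: ['count(*)'] } as QueryObject;

console.log(flattenOperator(formData, queryObject));
// => { operation: 'flatten' }
```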
diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/operators/index.ts b/superset-frontend/packages/superset-ui-chart-controls/src/operators/index.ts index 28e7e70070e87..f39d649f8864b 100644
--- a/superset-frontend/packages/superset-ui-chart-controls/src/operators/index.ts
+++ b/superset-frontend/packages/superset-ui-chart-controls/src/operators/index.ts
@@ -23,6 +23,7 @@
 export { timeComparePivotOperator } from './timeComparePivotOperator';
 export { sortOperator } from './sortOperator';
 export { pivotOperator } from './pivotOperator';
 export { resampleOperator } from './resampleOperator';
+export { renameOperator } from './renameOperator';
 export { contributionOperator } from './contributionOperator';
 export { prophetOperator } from './prophetOperator';
 export { boxplotOperator } from './boxplotOperator';
diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/operators/renameOperator.ts b/superset-frontend/packages/superset-ui-chart-controls/src/operators/renameOperator.ts
new file mode 100644 index 0000000000000..94dfa70bbc8f2
--- /dev/null
+++ b/superset-frontend/packages/superset-ui-chart-controls/src/operators/renameOperator.ts
@@ -0,0 +1,89 @@
+/* eslint-disable camelcase */
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+import {
+  PostProcessingRename,
+  ensureIsArray,
+  getMetricLabel,
+  ComparisionType,
+} from '@superset-ui/core';
+import { PostProcessingFactory } from './types';
+import { getMetricOffsetsMap, isTimeComparison } from './utils';
+
+export const renameOperator: PostProcessingFactory<PostProcessingRename> = (
+  formData,
+  queryObject,
+) => {
+  const metrics = ensureIsArray(queryObject.metrics);
+  const columns = ensureIsArray(queryObject.columns);
+  const { x_axis: xAxis } = formData;
+  // Remove or rename the top level of the column names (the metric name) in the MultiIndex when all of the following hold:
+  // 1) there is only one metric
+  // 2) at least one dimension exists
+  // 3) an x-axis exists, or the query is a timeseries
+  // 4) there is no time comparison, or its comparison type is "actual values"
+  if (
+    metrics.length === 1 &&
+    columns.length > 0 &&
+    (xAxis || queryObject.is_timeseries) &&
+    !(
+      // TODO: we should provide an approach to handle derived metrics
+      (
+        isTimeComparison(formData, queryObject) &&
+        [
+          ComparisionType.Difference,
+          ComparisionType.Ratio,
+          ComparisionType.Percentage,
+        ].includes(formData.comparison_type)
+      )
+    )
+  ) {
+    const renamePairs: [string, string | null][] = [];
+
+    if (
+      // "actual values" adds derived metrics; we rename each
+      // metricWithOffset label down to its time offset,
+      // for example: "count__1 year ago" => "1 year ago"
+      isTimeComparison(formData, queryObject) &&
+      formData.comparison_type === ComparisionType.Values
+    ) {
+      const metricOffsetMap = getMetricOffsetsMap(formData, queryObject);
+      const timeOffsets = ensureIsArray(formData.time_compare);
+      [...metricOffsetMap.keys()].forEach(metricWithOffset => {
+        const offsetLabel = timeOffsets.find(offset =>
+          metricWithOffset.includes(offset),
+        );
+        renamePairs.push([metricWithOffset, offsetLabel]);
+      });
+    }
+
+    renamePairs.push([getMetricLabel(metrics[0]), null]);
+
+    return {
+      operation: 'rename',
+      options: {
+        columns: Object.fromEntries(renamePairs),
+        level: 0,
+        inplace: true,
+      },
+    };
+  }
+
+  return undefined;
+};
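For illustration: a sketch of the rule the new operator emits for a single-metric timeseries with an "actual values" time comparison; the inputs below are illustrative and mirror the fixtures in the new `renameOperator.test.ts` further down:

```ts
import { renameOperator } from '@superset-ui/chart-controls';
import { ComparisionType, QueryObject, SqlaFormData } from '@superset-ui/core';

// One metric, one dimension, an x-axis, and an "actual values" comparison.
const formData = {
  datasource: 'foo',
  viz_type: 'table',
  x_axis: 'dttm',
  metrics: ['count(*)'],
  groupby: ['gender'],
  granularity: 'month',
  time_range: '2015 : 2016',
  comparison_type: ComparisionType.Values,
  time_compare: ['1 year ago'],
} as SqlaFormData;
const queryObject = {
  is_timeseries: true,
  metrics: ['count(*)'],
  columns: ['gender', 'dttm'],
  granularity: 'month',
  time_range: '2015 : 2016',
} as QueryObject;

// The derived "count(*)__1 year ago" column is renamed to its offset label,
// and the metric-name level of the remaining column is blanked out (null).
console.log(renameOperator(formData, queryObject));
// => {
//      operation: 'rename',
//      options: {
//        columns: { 'count(*)': null, 'count(*)__1 year ago': '1 year ago' },
//        level: 0,
//        inplace: true,
//      },
//    }
```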
diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/operators/rollingWindowOperator.ts b/superset-frontend/packages/superset-ui-chart-controls/src/operators/rollingWindowOperator.ts index 563b3e0544faa..0ab459e5cae03 100644
--- a/superset-frontend/packages/superset-ui-chart-controls/src/operators/rollingWindowOperator.ts
+++ b/superset-frontend/packages/superset-ui-chart-controls/src/operators/rollingWindowOperator.ts
@@ -24,14 +24,14 @@ import {
   PostProcessingRolling,
   RollingType,
 } from '@superset-ui/core';
-import { getMetricOffsetsMap, isValidTimeCompare } from './utils';
+import { getMetricOffsetsMap, isTimeComparison } from './utils';
 import { PostProcessingFactory } from './types';
 
 export const rollingWindowOperator: PostProcessingFactory<
   PostProcessingRolling | PostProcessingCum
 > = (formData, queryObject) => {
   let columns: (string | undefined)[];
-  if (isValidTimeCompare(formData, queryObject)) {
+  if (isTimeComparison(formData, queryObject)) {
     const metricsMap = getMetricOffsetsMap(formData, queryObject);
     columns = [
       ...Array.from(metricsMap.values()),
diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/operators/timeCompareOperator.ts b/superset-frontend/packages/superset-ui-chart-controls/src/operators/timeCompareOperator.ts index ec62384615f74..3fe253edfdfd1 100644
--- a/superset-frontend/packages/superset-ui-chart-controls/src/operators/timeCompareOperator.ts
+++ b/superset-frontend/packages/superset-ui-chart-controls/src/operators/timeCompareOperator.ts
@@ -18,7 +18,7 @@
  * under the License.
  */
 import { ComparisionType, PostProcessingCompare } from '@superset-ui/core';
-import { getMetricOffsetsMap, isValidTimeCompare } from './utils';
+import { getMetricOffsetsMap, isTimeComparison } from './utils';
 import { PostProcessingFactory } from './types';
 
 export const timeCompareOperator: PostProcessingFactory<PostProcessingCompare> =
@@ -27,7 +27,7 @@ export const timeCompareOperator: PostProcessingFactory<PostProcessingCompare> =
     const metricOffsetMap = getMetricOffsetsMap(formData, queryObject);
 
     if (
-      isValidTimeCompare(formData, queryObject) &&
+      isTimeComparison(formData, queryObject) &&
       comparisonType !== ComparisionType.Values
     ) {
       return {
diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/operators/timeComparePivotOperator.ts b/superset-frontend/packages/superset-ui-chart-controls/src/operators/timeComparePivotOperator.ts index 44a1825ff8ee5..f7bbd238c6f54 100644
--- a/superset-frontend/packages/superset-ui-chart-controls/src/operators/timeComparePivotOperator.ts
+++ b/superset-frontend/packages/superset-ui-chart-controls/src/operators/timeComparePivotOperator.ts
@@ -24,14 +24,14 @@ import {
   NumpyFunction,
   PostProcessingPivot,
 } from '@superset-ui/core';
-import { getMetricOffsetsMap, isValidTimeCompare } from './utils';
+import { getMetricOffsetsMap, isTimeComparison } from './utils';
 import { PostProcessingFactory } from './types';
 
 export const timeComparePivotOperator: PostProcessingFactory<PostProcessingPivot> =
   (formData, queryObject) => {
     const metricOffsetMap = getMetricOffsetsMap(formData, queryObject);
 
-    if (isValidTimeCompare(formData, queryObject)) {
+    if (isTimeComparison(formData, queryObject)) {
       const aggregates = Object.fromEntries(
         [...metricOffsetMap.values(), ...metricOffsetMap.keys()].map(metric => [
           metric,
diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/operators/utils/index.ts b/superset-frontend/packages/superset-ui-chart-controls/src/operators/utils/index.ts index d591dbd23edde..e4dfbd776908d 100644
--- a/superset-frontend/packages/superset-ui-chart-controls/src/operators/utils/index.ts
+++ b/superset-frontend/packages/superset-ui-chart-controls/src/operators/utils/index.ts
@@ -18,5 +18,5 @@
  * under the License.
 */
 export { getMetricOffsetsMap } from './getMetricOffsetsMap';
-export { isValidTimeCompare } from './isValidTimeCompare';
+export { isTimeComparison } from './isTimeComparison';
 export { TIME_COMPARISON_SEPARATOR } from './constants';
diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/operators/utils/isValidTimeCompare.ts b/superset-frontend/packages/superset-ui-chart-controls/src/operators/utils/isTimeComparison.ts
similarity index 94%
rename from superset-frontend/packages/superset-ui-chart-controls/src/operators/utils/isValidTimeCompare.ts
rename to superset-frontend/packages/superset-ui-chart-controls/src/operators/utils/isTimeComparison.ts
index 793bb392315d8..4430b9541cdbb 100644
--- a/superset-frontend/packages/superset-ui-chart-controls/src/operators/utils/isValidTimeCompare.ts
+++ b/superset-frontend/packages/superset-ui-chart-controls/src/operators/utils/isTimeComparison.ts
@@ -21,7 +21,7 @@
 import { ComparisionType } from '@superset-ui/core';
 import { getMetricOffsetsMap } from './getMetricOffsetsMap';
 import { PostProcessingFactory } from '../types';
 
-export const isValidTimeCompare: PostProcessingFactory<boolean> = (
+export const isTimeComparison: PostProcessingFactory<boolean> = (
   formData,
   queryObject,
 ) => {
diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/sections/advancedAnalytics.tsx b/superset-frontend/packages/superset-ui-chart-controls/src/sections/advancedAnalytics.tsx index ebd118d88122c..3d562309ca948 100644
--- a/superset-frontend/packages/superset-ui-chart-controls/src/sections/advancedAnalytics.tsx
+++ b/superset-frontend/packages/superset-ui-chart-controls/src/sections/advancedAnalytics.tsx
@@ -30,7 +30,7 @@
       'of query results',
     ),
     controlSetRows: [
-      [<h1 className="section-header">{t('Rolling window')}</h1>],

+      [<div className="section-header">{t('Rolling window')}</div>],
[ { name: 'rolling_type', @@ -85,7 +85,7 @@ }, }, ], - [

<h1 className="section-header">{t('Time comparison')}</h1>],
+      [<div className="section-header">{t('Time comparison')}</div>],
[ { name: 'time_compare', @@ -136,7 +136,7 @@ }, }, ], - [

<h1 className="section-header">{t('Resample')}</h1>],
+      [<div className="section-header">{t('Resample')}</div>],
[ { name: 'resample_rule', diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/sections/chartTitle.tsx b/superset-frontend/packages/superset-ui-chart-controls/src/sections/chartTitle.tsx index 5e99d976c55b3..314e983c589ae 100644 --- a/superset-frontend/packages/superset-ui-chart-controls/src/sections/chartTitle.tsx +++ b/superset-frontend/packages/superset-ui-chart-controls/src/sections/chartTitle.tsx @@ -28,7 +28,7 @@ tabOverride: 'customize', expanded: true, controlSetRows: [ - [

<h1 className="section-header">{t('X Axis')}</h1>],
+      [<div className="section-header">{t('X Axis')}</div>],
[ { name: 'x_axis_title', @@ -56,7 +56,7 @@ }, }, ], - [

<h1 className="section-header">{t('Y Axis')}</h1>],
+      [<div className="section-header">{t('Y Axis')}</div>],
[ { name: 'y_axis_title', diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/components/ColumnConfigControl/ColumnConfigItem.tsx b/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/components/ColumnConfigControl/ColumnConfigItem.tsx index 06429ef593a5b..f28d5b8d2332d 100644 --- a/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/components/ColumnConfigControl/ColumnConfigItem.tsx +++ b/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/components/ColumnConfigControl/ColumnConfigItem.tsx @@ -48,8 +48,10 @@ >
{ expect(flattenOperator(formData, queryObject)).toEqual({ operation: 'flatten', - options: { - drop_levels: [], - }, - }); -}); - -test('should add drop level', () => { - expect(flattenOperator(formData, singleMetricQueryObject)).toEqual({ - operation: 'flatten', - options: { - drop_levels: [0], - }, }); }); diff --git a/superset-frontend/packages/superset-ui-chart-controls/test/utils/operators/renameOperator.test.ts b/superset-frontend/packages/superset-ui-chart-controls/test/utils/operators/renameOperator.test.ts new file mode 100644 index 0000000000000..2c32e0791ba17 --- /dev/null +++ b/superset-frontend/packages/superset-ui-chart-controls/test/utils/operators/renameOperator.test.ts @@ -0,0 +1,146 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { ComparisionType, QueryObject, SqlaFormData } from '@superset-ui/core'; +import { renameOperator } from '@superset-ui/chart-controls'; + +const formData: SqlaFormData = { + x_axis: 'dttm', + metrics: ['count(*)'], + groupby: ['gender'], + time_range: '2015 : 2016', + granularity: 'month', + datasource: 'foo', + viz_type: 'table', +}; +const queryObject: QueryObject = { + is_timeseries: true, + metrics: ['count(*)'], + columns: ['gender', 'dttm'], + time_range: '2015 : 2016', + granularity: 'month', + post_processing: [], +}; + +test('should skip renameOperator if exists multiple metrics', () => { + expect( + renameOperator(formData, { + ...queryObject, + ...{ + metrics: ['count(*)', 'sum(sales)'], + }, + }), + ).toEqual(undefined); +}); + +test('should skip renameOperator if does not exist series', () => { + expect( + renameOperator(formData, { + ...queryObject, + ...{ + columns: [], + }, + }), + ).toEqual(undefined); +}); + +test('should skip renameOperator if does not exist x_axis and is_timeseries', () => { + expect( + renameOperator( + { + ...formData, + ...{ x_axis: null }, + }, + { ...queryObject, ...{ is_timeseries: false } }, + ), + ).toEqual(undefined); +}); + +test('should skip renameOperator if exists derived metrics', () => { + [ + ComparisionType.Difference, + ComparisionType.Ratio, + ComparisionType.Percentage, + ].forEach(type => { + expect( + renameOperator( + { + ...formData, + ...{ + comparison_type: type, + time_compare: ['1 year ago'], + }, + }, + { + ...queryObject, + ...{ + metrics: ['count(*)'], + }, + }, + ), + ).toEqual(undefined); + }); +}); + +test('should add renameOperator', () => { + expect(renameOperator(formData, queryObject)).toEqual({ + operation: 'rename', + options: { columns: { 'count(*)': null }, inplace: true, level: 0 }, + }); +}); + +test('should add renameOperator if does not exist x_axis', () => { + expect( + renameOperator( + { + ...formData, + ...{ x_axis: null }, + }, + queryObject, + ), + ).toEqual({ + operation: 'rename', + 
+    options: { columns: { 'count(*)': null }, inplace: true, level: 0 },
+  });
+});
+
+test('should add renameOperator if exist "actual value" time comparison', () => {
+  expect(
+    renameOperator(
+      {
+        ...formData,
+        ...{
+          comparison_type: ComparisionType.Values,
+          time_compare: ['1 year ago', '1 year later'],
+        },
+      },
+      queryObject,
+    ),
+  ).toEqual({
+    operation: 'rename',
+    options: {
+      columns: {
+        'count(*)': null,
+        'count(*)__1 year ago': '1 year ago',
+        'count(*)__1 year later': '1 year later',
+      },
+      inplace: true,
+      level: 0,
+    },
+  });
+});
diff --git a/superset-frontend/packages/superset-ui-core/package.json b/superset-frontend/packages/superset-ui-core/package.json index 13e29e54a8c09..424f3b877620c 100644
--- a/superset-frontend/packages/superset-ui-core/package.json
+++ b/superset-frontend/packages/superset-ui-core/package.json
@@ -36,7 +36,7 @@
     "@types/d3-format": "^1.3.0",
     "@types/d3-interpolate": "^1.3.1",
     "@types/d3-scale": "^2.1.1",
-    "@types/d3-time": "^1.0.9",
+    "@types/d3-time": "^3.0.0",
     "@types/d3-time-format": "^2.1.0",
     "@types/lodash": "^4.14.149",
     "@types/math-expression-evaluator": "^1.2.1",
diff --git a/superset-frontend/packages/superset-ui-core/src/connection/SupersetClientClass.ts b/superset-frontend/packages/superset-ui-core/src/connection/SupersetClientClass.ts index 5e046bce7e929..7a6dfd97b0207 100644
--- a/superset-frontend/packages/superset-ui-core/src/connection/SupersetClientClass.ts
+++ b/superset-frontend/packages/superset-ui-core/src/connection/SupersetClientClass.ts
@@ -35,9 +35,7 @@
 import { DEFAULT_FETCH_RETRY_OPTIONS, DEFAULT_BASE_URL } from './constants';
 
 const defaultUnauthorizedHandler = () => {
   if (!window.location.pathname.startsWith('/login')) {
-    window.location.href = `/login?next=${
-      window.location.pathname + window.location.search
-    }`;
+    window.location.href = `/login?next=${window.location.href}`;
   }
 };
diff --git a/superset-frontend/packages/superset-ui-core/src/query/types/PostProcessing.ts b/superset-frontend/packages/superset-ui-core/src/query/types/PostProcessing.ts index 0ba7e4fc4af59..315cdb8456cda 100644
--- a/superset-frontend/packages/superset-ui-core/src/query/types/PostProcessing.ts
+++ b/superset-frontend/packages/superset-ui-core/src/query/types/PostProcessing.ts
@@ -201,6 +201,18 @@
 export type PostProcessingResample =
   | _PostProcessingResample
   | DefaultPostProcessing;
 
+interface _PostProcessingRename {
+  operation: 'rename';
+  options: {
+    columns: Record<string, string | null>;
+    inplace?: boolean;
+    level?: number | string;
+  };
+}
+export type PostProcessingRename =
+  | _PostProcessingRename
+  | DefaultPostProcessing;
+
 interface _PostProcessingFlatten {
   operation: 'flatten';
   options?: {
@@ -228,6 +240,7 @@
   | PostProcessingCompare
   | PostProcessingSort
   | PostProcessingResample
+  | PostProcessingRename
   | PostProcessingFlatten;
 
 export function isPostProcessingAggregation(
diff --git a/superset-frontend/packages/superset-ui-core/test/connection/SupersetClientClass.test.ts b/superset-frontend/packages/superset-ui-core/test/connection/SupersetClientClass.test.ts index a17cbceb223d7..ef31e5d35d857 100644
--- a/superset-frontend/packages/superset-ui-core/test/connection/SupersetClientClass.test.ts
+++ b/superset-frontend/packages/superset-ui-core/test/connection/SupersetClientClass.test.ts
@@ -505,8 +505,7 @@ describe('SupersetClientClass', () => {
     const mockRequestUrl = 'https://host/get/url';
     const mockRequestPath = '/get/url';
     const mockRequestSearch = '?param=1&param=2';
-    const mockRequestRelativeUrl = mockRequestPath + mockRequestSearch;
-    const mockHref = `http://localhost${mockRequestRelativeUrl}`;
+    const mockHref = mockRequestUrl + mockRequestSearch;
 
     beforeEach(() => {
       originalLocation = window.location;
@@ -542,7 +541,7 @@
         error = err;
       } finally {
         const redirectURL = window.location.href;
-        expect(redirectURL).toBe(`/login?next=${mockRequestRelativeUrl}`);
+        expect(redirectURL).toBe(`/login?next=${mockHref}`);
         expect(error.status).toBe(401);
       }
     });
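For illustration: the connection change above (and this test update) moves the unauthorized redirect from the old path-plus-query relative URL to the absolute URL, origin included. A minimal sketch of the new behavior, with `unauthorizedRedirect` as a hypothetical helper name:

```ts
// Mirrors the updated defaultUnauthorizedHandler in SupersetClientClass.ts.
const unauthorizedRedirect = (loc: Location): string =>
  `/login?next=${loc.href}`;

// With the test's mock location https://host/get/url?param=1&param=2:
//   before: /login?next=/get/url?param=1&param=2   (pathname + search)
//   after:  /login?next=https://host/get/url?param=1&param=2
```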

diff --git a/superset-frontend/plugins/legacy-plugin-chart-partition/src/controlPanel.tsx b/superset-frontend/plugins/legacy-plugin-chart-partition/src/controlPanel.tsx index c742e6d1335cb..93139f7ff7b8d 100644 --- a/superset-frontend/plugins/legacy-plugin-chart-partition/src/controlPanel.tsx +++ b/superset-frontend/plugins/legacy-plugin-chart-partition/src/controlPanel.tsx @@ -240,7 +240,7 @@
     ),
     controlSetRows: [
       // eslint-disable-next-line react/jsx-key
-      [<h1 className="section-header">{t('Rolling Window')}</h1>],
+      [<div className="section-header">{t('Rolling Window')}</div>],
[ { name: 'rolling_type', @@ -292,7 +292,7 @@ }, ], // eslint-disable-next-line react/jsx-key - [

<h1 className="section-header">{t('Time Comparison')}</h1>],
+      [<div className="section-header">{t('Time Comparison')}</div>],
[ { name: 'time_compare', @@ -341,10 +341,7 @@ }, }, ], - // eslint-disable-next-line react/jsx-key - [

<h1 className="section-header">{t('Python Functions')}</h1>],
-      // eslint-disable-next-line react/jsx-key
-      [<h2 className="section-header">pandas.resample</h2>],
+      [<div className="section-header">{t('Resample')}</div>],
[ { name: 'resample_rule', diff --git a/superset-frontend/plugins/legacy-plugin-chart-rose/src/controlPanel.tsx b/superset-frontend/plugins/legacy-plugin-chart-rose/src/controlPanel.tsx index fd04117e6217c..e43da2de7237a 100644 --- a/superset-frontend/plugins/legacy-plugin-chart-rose/src/controlPanel.tsx +++ b/superset-frontend/plugins/legacy-plugin-chart-rose/src/controlPanel.tsx @@ -123,7 +123,7 @@ ), controlSetRows: [ // eslint-disable-next-line react/jsx-key - [

<h1 className="section-header">{t('Rolling Window')}</h1>],
+      [<div className="section-header">{t('Rolling Window')}</div>],
[ { name: 'rolling_type', @@ -175,7 +175,7 @@ }, ], // eslint-disable-next-line react/jsx-key - [

<h1 className="section-header">{t('Time Comparison')}</h1>],
+      [<div className="section-header">{t('Time Comparison')}</div>],
[ { name: 'time_compare', @@ -224,10 +224,7 @@ }, }, ], - // eslint-disable-next-line react/jsx-key - [

<h1 className="section-header">{t('Python Functions')}</h1>],
-      // eslint-disable-next-line react/jsx-key
-      [<h2 className="section-header">pandas.resample</h2>],
+      [<div className="section-header">{t('Resample')}</div>
], [ { name: 'resample_rule', diff --git a/superset-frontend/plugins/legacy-plugin-chart-world-map/src/WorldMap.js b/superset-frontend/plugins/legacy-plugin-chart-world-map/src/WorldMap.js index 0c81e98560166..c7253e10d0e68 100644 --- a/superset-frontend/plugins/legacy-plugin-chart-world-map/src/WorldMap.js +++ b/superset-frontend/plugins/legacy-plugin-chart-world-map/src/WorldMap.js @@ -23,7 +23,6 @@ import { extent as d3Extent } from 'd3-array'; import { getNumberFormatter, getSequentialSchemeRegistry, - CategoricalColorNamespace, } from '@superset-ui/core'; import Datamap from 'datamaps/dist/datamaps.world.min'; @@ -56,8 +55,6 @@ function WorldMap(element, props) { showBubbles, linearColorScheme, color, - colorScheme, - sliceId, } = props; const div = d3.select(element); div.classed('superset-legacy-chart-world-map', true); @@ -72,24 +69,15 @@ function WorldMap(element, props) { .domain([extRadius[0], extRadius[1]]) .range([1, maxBubbleSize]); - const linearColorScale = getSequentialSchemeRegistry() + const colorScale = getSequentialSchemeRegistry() .get(linearColorScheme) .createLinearScale(d3Extent(filteredData, d => d.m1)); - const colorScale = CategoricalColorNamespace.getScale(colorScheme); - - const processedData = filteredData.map(d => { - let color = linearColorScale(d.m1); - if (colorScheme) { - // use color scheme instead - color = colorScale(d.name, sliceId); - } - return { - ...d, - radius: radiusScale(Math.sqrt(d.m2)), - fillColor: color, - }; - }); + const processedData = filteredData.map(d => ({ + ...d, + radius: radiusScale(Math.sqrt(d.m2)), + fillColor: colorScale(d.m1), + })); const mapData = {}; processedData.forEach(d => { diff --git a/superset-frontend/plugins/legacy-plugin-chart-world-map/src/controlPanel.ts b/superset-frontend/plugins/legacy-plugin-chart-world-map/src/controlPanel.ts index 91664290dcb02..ec8aafc7b872a 100644 --- a/superset-frontend/plugins/legacy-plugin-chart-world-map/src/controlPanel.ts +++ b/superset-frontend/plugins/legacy-plugin-chart-world-map/src/controlPanel.ts @@ -106,7 +106,6 @@ const config: ControlPanelConfig = { }, ], ['color_picker'], - ['color_scheme'], ['linear_color_scheme'], ], }, @@ -127,9 +126,6 @@ const config: ControlPanelConfig = { color_picker: { label: t('Bubble Color'), }, - color_scheme: { - label: t('Categorical Color Scheme'), - }, linear_color_scheme: { label: t('Country Color Scheme'), }, diff --git a/superset-frontend/plugins/legacy-plugin-chart-world-map/src/transformProps.js b/superset-frontend/plugins/legacy-plugin-chart-world-map/src/transformProps.js index 3838ebfa5c10a..464dd53afa4fc 100644 --- a/superset-frontend/plugins/legacy-plugin-chart-world-map/src/transformProps.js +++ b/superset-frontend/plugins/legacy-plugin-chart-world-map/src/transformProps.js @@ -20,14 +20,8 @@ import { rgb } from 'd3-color'; export default function transformProps(chartProps) { const { width, height, formData, queriesData } = chartProps; - const { - maxBubbleSize, - showBubbles, - linearColorScheme, - colorPicker, - colorScheme, - sliceId, - } = formData; + const { maxBubbleSize, showBubbles, linearColorScheme, colorPicker } = + formData; const { r, g, b } = colorPicker; return { @@ -38,7 +32,5 @@ export default function transformProps(chartProps) { showBubbles, linearColorScheme, color: rgb(r, g, b).hex(), - colorScheme, - sliceId, }; } diff --git a/superset-frontend/plugins/legacy-preset-chart-nvd3/src/NVD3Controls.tsx b/superset-frontend/plugins/legacy-preset-chart-nvd3/src/NVD3Controls.tsx index 3b0bb92ac758b..151c53e41f2ff 
100644 --- a/superset-frontend/plugins/legacy-preset-chart-nvd3/src/NVD3Controls.tsx +++ b/superset-frontend/plugins/legacy-preset-chart-nvd3/src/NVD3Controls.tsx @@ -370,7 +370,7 @@ export const timeSeriesSection: ControlPanelSectionConfig[] = [ 'of query results', ), controlSetRows: [ - [
<h1 className="section-header">{t('Rolling Window')}</h1>], + [<div className="section-header">{t('Rolling Window')}</div>
], [ { name: 'rolling_type', @@ -423,7 +423,7 @@ export const timeSeriesSection: ControlPanelSectionConfig[] = [ }, }, ], - [
<h1 className="section-header">{t('Time Comparison')}</h1>], + [<div className="section-header">{t('Time Comparison')}</div>
], [ { name: 'time_compare', @@ -474,8 +474,7 @@ export const timeSeriesSection: ControlPanelSectionConfig[] = [ }, }, ], - [
<h1 className="section-header">{t('Python Functions')}</h1>], - [<h2 className="section-header">pandas.resample</h2>], + [<div className="section-header">{t('Resample')}</div>
], [ { name: 'resample_rule', diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberTotal/controlPanel.ts b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberTotal/controlPanel.ts index e30dcbe6bee6d..8511c3ca5645e 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberTotal/controlPanel.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberTotal/controlPanel.ts @@ -34,7 +34,7 @@ export default { controlSetRows: [['metric'], ['adhoc_filters']], }, { - label: t('Options'), + label: t('Display settings'), expanded: true, tabOverride: 'data', controlSetRows: [ diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberWithTrendline/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberWithTrendline/controlPanel.tsx index 6b99af91ce9c4..3ba00f55ea212 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberWithTrendline/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberWithTrendline/controlPanel.tsx @@ -164,7 +164,7 @@ const config: ControlPanelConfig = { expanded: false, controlSetRows: [ // eslint-disable-next-line react/jsx-key - [
<h1 className="section-header">{t('Rolling Window')}</h1>], + [<div className="section-header">{t('Rolling Window')}</div>
], [ { name: 'rolling_type', @@ -217,8 +217,7 @@ const config: ControlPanelConfig = { }, }, ], - // eslint-disable-next-line react/jsx-key - [
<h1 className="section-header">{t('Resample')}</h1>], + [<div className="section-header">{t('Resample')}</div>
], [ { name: 'resample_rule', diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Funnel/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Funnel/controlPanel.tsx index e1950bf9a5b37..fe2269cf89c05 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Funnel/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Funnel/controlPanel.tsx @@ -76,7 +76,7 @@ const config: ControlPanelConfig = { ['color_scheme'], ...funnelLegendSection, // eslint-disable-next-line react/jsx-key - [
<h1 className="section-header">{t('Labels')}</h1>], + [<div className="section-header">{t('Labels')}</div>
], [ { name: 'label_type', diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Gauge/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Gauge/controlPanel.tsx index 581d98c6b99f7..ff03da4153b18 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Gauge/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Gauge/controlPanel.tsx @@ -75,7 +75,7 @@ const config: ControlPanelConfig = { label: t('Chart Options'), expanded: true, controlSetRows: [ - [
<h1 className="section-header">{t('General')}</h1>], + [<div className="section-header">{t('General')}</div>
], [ { name: 'min_val', @@ -197,7 +197,7 @@ const config: ControlPanelConfig = { }, }, ], - [
<h1 className="section-header">{t('Axis')}</h1>], + [<div className="section-header">{t('Axis')}</div>
], [ { name: 'show_axis_tick', @@ -236,7 +236,7 @@ const config: ControlPanelConfig = { }, }, ], - [
<h1 className="section-header">{t('Progress')}</h1>], + [<div className="section-header">{t('Progress')}</div>
], [ { name: 'show_progress', @@ -277,7 +277,7 @@ const config: ControlPanelConfig = { }, }, ], - [
<h1 className="section-header">{t('Intervals')}</h1>], + [<div className="section-header">{t('Intervals')}</div>
], [ { name: 'intervals', diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Graph/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Graph/controlPanel.tsx index cdefae16cab54..cb2f586110177 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Graph/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Graph/controlPanel.tsx @@ -98,7 +98,7 @@ const controlPanel: ControlPanelConfig = { controlSetRows: [ ['color_scheme'], ...legendSection, - [
<h1 className="section-header">{t('Layout')}</h1>], + [<div className="section-header">{t('Layout')}</div>
], [ { name: 'layout', diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/buildQuery.ts b/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/buildQuery.ts index b85feb1eee5fa..9adc149489a27 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/buildQuery.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/buildQuery.ts @@ -22,7 +22,11 @@ import { QueryObject, normalizeOrderBy, } from '@superset-ui/core'; -import { flattenOperator, pivotOperator } from '@superset-ui/chart-controls'; +import { + pivotOperator, + renameOperator, + flattenOperator, +} from '@superset-ui/chart-controls'; export default function buildQuery(formData: QueryFormData) { const { @@ -66,7 +70,11 @@ export default function buildQuery(formData: QueryFormData) { is_timeseries: true, post_processing: [ pivotOperator(formData1, { ...baseQueryObject, is_timeseries: true }), - flattenOperator(formData1, { ...baseQueryObject, is_timeseries: true }), + renameOperator(formData1, { + ...baseQueryObject, + ...{ is_timeseries: true }, + }), + flattenOperator(formData1, baseQueryObject), ], } as QueryObject; return [normalizeOrderBy(queryObjectA)]; @@ -78,7 +86,11 @@ export default function buildQuery(formData: QueryFormData) { is_timeseries: true, post_processing: [ pivotOperator(formData2, { ...baseQueryObject, is_timeseries: true }), - flattenOperator(formData2, { ...baseQueryObject, is_timeseries: true }), + renameOperator(formData2, { + ...baseQueryObject, + ...{ is_timeseries: true }, + }), + flattenOperator(formData2, baseQueryObject), ], } as QueryObject; return [normalizeOrderBy(queryObjectB)]; diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/controlPanel.tsx index 8cd681c5e33e1..97955eec3500c 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/controlPanel.tsx @@ -126,7 +126,7 @@ function createCustomizeSection( controlSuffix: string, ): ControlSetRow[] { return [ - [
<h1 className="section-header">{label}</h1>], + [<div className="section-header">{label}</div>
], [ { name: `seriesType${controlSuffix}`, @@ -296,7 +296,7 @@ const config: ControlPanelConfig = { }, ], ...legendSection, - [
<h1 className="section-header">{t('X Axis')}</h1>], + [<div className="section-header">{t('X Axis')}</div>
], ['x_axis_time_format'], [ { @@ -320,7 +320,7 @@ const config: ControlPanelConfig = { ], ...richTooltipSection, // eslint-disable-next-line react/jsx-key - [
<h1 className="section-header">{t('Y Axis')}</h1>], + [<div className="section-header">{t('Y Axis')}</div>
], [ { name: 'minorSplitLine', diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Pie/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Pie/controlPanel.tsx index c195c5e2214d9..9056446f9f412 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Pie/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Pie/controlPanel.tsx @@ -90,7 +90,7 @@ const config: ControlPanelConfig = { ], ...legendSection, // eslint-disable-next-line react/jsx-key - [
<h1 className="section-header">{t('Labels')}</h1>], + [<div className="section-header">{t('Labels')}</div>
], [ { name: 'label_type', @@ -196,7 +196,7 @@ const config: ControlPanelConfig = { }, ], // eslint-disable-next-line react/jsx-key - [
<h1 className="section-header">{t('Pie shape')}</h1>], + [<div className="section-header">{t('Pie shape')}</div>
], [ { name: 'outerRadius', diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Radar/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Radar/controlPanel.tsx index 0f8e390802a56..d24497280ae6b 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Radar/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Radar/controlPanel.tsx @@ -85,7 +85,7 @@ const config: ControlPanelConfig = { controlSetRows: [ ['color_scheme'], ...legendSection, - [
<h1 className="section-header">{t('Labels')}</h1>], + [<div className="section-header">{t('Labels')}</div>
], [ { name: 'show_labels', @@ -158,7 +158,7 @@ const config: ControlPanelConfig = { }, }, ], - [
<h1 className="section-header">{t('Radar')}</h1>], + [<div className="section-header">{t('Radar')}</div>
], [ { name: 'column_config', diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Area/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Area/controlPanel.tsx index 87503166b7977..b973cb6782c03 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Area/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Area/controlPanel.tsx @@ -178,7 +178,7 @@ const config: ControlPanelConfig = { }, ], ...legendSection, - [
<h1 className="section-header">{t('X Axis')}</h1>], + [<div className="section-header">{t('X Axis')}</div>
], [ { name: 'x_axis_time_format', @@ -213,7 +213,7 @@ const config: ControlPanelConfig = { ], ...richTooltipSection, // eslint-disable-next-line react/jsx-key - [
<h1 className="section-header">{t('Y Axis')}</h1>], + [<div className="section-header">{t('Y Axis')}</div>
], ['y_axis_format'], [ { diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Bar/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Bar/controlPanel.tsx index bd40eeebe0e75..a3b74aa12f4fe 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Bar/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Bar/controlPanel.tsx @@ -139,7 +139,7 @@ const config: ControlPanelConfig = { }, ], ...legendSection, - [
<h1 className="section-header">{t('X Axis')}</h1>], + [<div className="section-header">{t('X Axis')}</div>
], [ { name: 'x_axis_time_format', @@ -175,7 +175,7 @@ const config: ControlPanelConfig = { // eslint-disable-next-line react/jsx-key ...richTooltipSection, // eslint-disable-next-line react/jsx-key - [
<h1 className="section-header">{t('Y Axis')}</h1>], + [<div className="section-header">{t('Y Axis')}</div>
], ['y_axis_format'], [ diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Scatter/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Scatter/controlPanel.tsx index 4cdf16c8395a2..abc5e9a29e724 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Scatter/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Scatter/controlPanel.tsx @@ -119,7 +119,7 @@ const config: ControlPanelConfig = { }, ], ...legendSection, - [
<h1 className="section-header">{t('X Axis')}</h1>], + [<div className="section-header">{t('X Axis')}</div>
], [ { @@ -156,7 +156,7 @@ const config: ControlPanelConfig = { // eslint-disable-next-line react/jsx-key ...richTooltipSection, // eslint-disable-next-line react/jsx-key - [
<h1 className="section-header">{t('Y Axis')}</h1>], + [<div className="section-header">{t('Y Axis')}</div>
], ['y_axis_format'], [ { diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/controlPanel.tsx index d2f3acce9e08f..f234df0c82b4a 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/controlPanel.tsx @@ -136,7 +136,7 @@ const config: ControlPanelConfig = { }, ], ...legendSection, - [
<h1 className="section-header">{t('X Axis')}</h1>], + [<div className="section-header">{t('X Axis')}</div>
], [ { name: 'x_axis_time_format', @@ -172,7 +172,7 @@ const config: ControlPanelConfig = { // eslint-disable-next-line react/jsx-key ...richTooltipSection, // eslint-disable-next-line react/jsx-key - [
<h1 className="section-header">{t('Y Axis')}</h1>], + [<div className="section-header">{t('Y Axis')}</div>
], ['y_axis_format'], [ diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Step/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Step/controlPanel.tsx index 1416a7db4686c..b8d3a31b2c295 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Step/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Step/controlPanel.tsx @@ -194,7 +194,7 @@ const config: ControlPanelConfig = { }, ], ...legendSection, - [
<h1 className="section-header">{t('X Axis')}</h1>], + [<div className="section-header">{t('X Axis')}</div>
], [ { name: 'x_axis_time_format', @@ -229,7 +229,7 @@ const config: ControlPanelConfig = { ], ...richTooltipSection, // eslint-disable-next-line react/jsx-key - [
<h1 className="section-header">{t('Y Axis')}</h1>], + [<div className="section-header">{t('Y Axis')}</div>
], ['y_axis_format'], [ { diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/buildQuery.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/buildQuery.ts index c4cdaa9360a64..3478c73470fc7 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/buildQuery.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/buildQuery.ts @@ -27,9 +27,10 @@ import { import { rollingWindowOperator, timeCompareOperator, - isValidTimeCompare, + isTimeComparison, pivotOperator, resampleOperator, + renameOperator, contributionOperator, prophetOperator, timeComparePivotOperator, @@ -60,7 +61,7 @@ export default function buildQuery(formData: QueryFormData) { 2015-03-01 318.0 0.0 */ - const pivotOperatorInRuntime: PostProcessingPivot = isValidTimeCompare( + const pivotOperatorInRuntime: PostProcessingPivot = isTimeComparison( formData, baseQueryObject, ) @@ -79,7 +80,7 @@ export default function buildQuery(formData: QueryFormData) { is_timeseries, // todo: move `normalizeOrderBy to extractQueryFields` orderby: normalizeOrderBy(baseQueryObject).orderby, - time_offsets: isValidTimeCompare(formData, baseQueryObject) + time_offsets: isTimeComparison(formData, baseQueryObject) ? formData.time_compare : [], /* Note that: @@ -91,7 +92,12 @@ export default function buildQuery(formData: QueryFormData) { rollingWindowOperator(formData, baseQueryObject), timeCompareOperator(formData, baseQueryObject), resampleOperator(formData, baseQueryObject), + renameOperator(formData, { + ...baseQueryObject, + ...{ is_timeseries }, + }), flattenOperator(formData, baseQueryObject), + // todo: move contribution and prophet before flatten contributionOperator(formData, baseQueryObject), prophetOperator(formData, baseQueryObject), ], diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/controlPanel.tsx index 1f1e22b49b3a5..8f22acadeefc3 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/controlPanel.tsx @@ -197,7 +197,7 @@ const config: ControlPanelConfig = { }, ], ...legendSection, - [
<h1 className="section-header">{t('X Axis')}</h1>], + [<div className="section-header">{t('X Axis')}</div>
], [ { name: 'x_axis_time_format', @@ -232,7 +232,7 @@ const config: ControlPanelConfig = { ], ...richTooltipSection, // eslint-disable-next-line react/jsx-key - [
<h1 className="section-header">{t('Y Axis')}</h1>], + [<div className="section-header">{t('Y Axis')}</div>
], ['y_axis_format'], [ { diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Tree/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Tree/controlPanel.tsx index aa4a38fca871b..cd48e0f636e0b 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Tree/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Tree/controlPanel.tsx @@ -107,7 +107,7 @@ const controlPanel: ControlPanelConfig = { label: t('Chart options'), expanded: true, controlSetRows: [ - [
<h1 className="section-header">{t('Layout')}</h1>], + [<div className="section-header">{t('Layout')}</div>
], [ { name: 'layout', diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Treemap/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Treemap/controlPanel.tsx index 9f6d4e297e031..63ca40225ffe6 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Treemap/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Treemap/controlPanel.tsx @@ -62,7 +62,7 @@ const config: ControlPanelConfig = { expanded: true, controlSetRows: [ ['color_scheme'], - [
<h1 className="section-header">{t('Labels')}</h1>], + [<div className="section-header">{t('Labels')}</div>
], [ { name: 'show_labels', diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/controls.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/controls.tsx index 053d0db8359fd..df050e6dbb418 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/controls.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/controls.tsx @@ -94,7 +94,7 @@ const legendOrientationControl: ControlSetItem = { }; export const legendSection: ControlSetRow[] = [ - [
<h1 className="section-header">{t('Legend')}</h1>], + [<div className="section-header">{t('Legend')}</div>
], [showLegendControl], [legendTypeControl], [legendOrientationControl], @@ -219,7 +219,7 @@ const tooltipSortByMetricControl: ControlSetItem = { }; export const richTooltipSection: ControlSetRow[] = [ - [
<h1 className="section-header">{t('Tooltip')}</h1>], + [<div className="section-header">{t('Tooltip')}</div>
], [richTooltipControl], [tooltipSortByMetricControl], [tooltipTimeFormatControl], diff --git a/superset-frontend/plugins/plugin-chart-table/src/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-table/src/controlPanel.tsx index 5b9abfb163d9b..c121547518e46 100644 --- a/superset-frontend/plugins/plugin-chart-table/src/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-table/src/controlPanel.tsx @@ -117,6 +117,7 @@ const all_columns: typeof sharedControls.groupby = { : [], }), visibility: isRawMode, + resetOnHide: false, }; const dnd_all_columns: typeof sharedControls.groupby = { @@ -140,6 +141,7 @@ const dnd_all_columns: typeof sharedControls.groupby = { return newState; }, visibility: isRawMode, + resetOnHide: false, }; const percent_metrics: typeof sharedControls.metrics = { @@ -150,6 +152,7 @@ const percent_metrics: typeof sharedControls.metrics = { ), multi: true, visibility: isAggMode, + resetOnHide: false, mapStateToProps: ({ datasource, controls }, controlState) => ({ columns: datasource?.columns || [], savedMetrics: datasource?.metrics || [], @@ -190,6 +193,7 @@ const config: ControlPanelConfig = { name: 'groupby', override: { visibility: isAggMode, + resetOnHide: false, mapStateToProps: ( state: ControlPanelState, controlState: ControlState, @@ -220,6 +224,7 @@ const config: ControlPanelConfig = { override: { validators: [], visibility: isAggMode, + resetOnHide: false, mapStateToProps: ( { controls, datasource, form_data }: ControlPanelState, controlState: ControlState, @@ -263,6 +268,7 @@ const config: ControlPanelConfig = { name: 'timeseries_limit_metric', override: { visibility: isAggMode, + resetOnHide: false, }, }, { @@ -277,6 +283,7 @@ const config: ControlPanelConfig = { choices: datasource?.order_by_choices || [], }), visibility: isRawMode, + resetOnHide: false, }, }, ], @@ -329,6 +336,7 @@ const config: ControlPanelConfig = { ), default: false, visibility: isAggMode, + resetOnHide: false, }, }, { @@ -339,6 +347,7 @@ const config: ControlPanelConfig = { default: true, description: t('Whether to sort descending or ascending'), visibility: isAggMode, + resetOnHide: false, }, }, ], @@ -353,6 +362,7 @@ const config: ControlPanelConfig = { 'Show total aggregations of selected metrics. 
Note that row limit does not apply to the result.', ), visibility: isAggMode, + resetOnHide: false, }, }, ], diff --git a/superset-frontend/src/SqlLab/actions/sqlLab.js b/superset-frontend/src/SqlLab/actions/sqlLab.js index 3d1298e6c3b73..41717dd17488b 100644 --- a/superset-frontend/src/SqlLab/actions/sqlLab.js +++ b/superset-frontend/src/SqlLab/actions/sqlLab.js @@ -1279,6 +1279,7 @@ export function popSavedQuery(saveQueryId) { .then(({ json }) => { const queryEditorProps = { ...convertQueryToClient(json.result), + loaded: true, autorun: false, }; return dispatch(addQueryEditor(queryEditorProps)); diff --git a/superset-frontend/src/SqlLab/components/AceEditorWrapper/index.tsx b/superset-frontend/src/SqlLab/components/AceEditorWrapper/index.tsx index 35722ba866066..53ec3f808a62f 100644 --- a/superset-frontend/src/SqlLab/components/AceEditorWrapper/index.tsx +++ b/superset-frontend/src/SqlLab/components/AceEditorWrapper/index.tsx @@ -66,7 +66,6 @@ interface Props { interface State { sql: string; - selectedText: string; words: AceCompleterKeyword[]; } @@ -80,13 +79,20 @@ class AceEditorWrapper extends React.PureComponent { extendedTables: [], }; + private currentSelectionCache; + constructor(props: Props) { super(props); this.state = { sql: props.sql, - selectedText: '', words: [], }; + + // The editor changeSelection is called multiple times in a row, + // faster than React reconciliation process, so the selected text + // needs to be stored out of the state to ensure changes to it + // get saved immediately + this.currentSelectionCache = ''; this.onChange = this.onChange.bind(this); } @@ -146,17 +152,19 @@ class AceEditorWrapper extends React.PureComponent { editor.$blockScrolling = Infinity; // eslint-disable-line no-param-reassign editor.selection.on('changeSelection', () => { const selectedText = editor.getSelectedText(); + // Backspace trigger 1 character selection, ignoring if ( - selectedText !== this.state.selectedText && + selectedText !== this.currentSelectionCache && selectedText.length !== 1 ) { - this.setState({ selectedText }); this.props.actions.queryEditorSetSelectedText( this.props.queryEditor, selectedText, ); } + + this.currentSelectionCache = selectedText; }); } @@ -219,11 +227,15 @@ class AceEditorWrapper extends React.PureComponent { this.props.queryEditor.schema, ); } + + let { caption } = data; + if (data.meta === 'table' && caption.includes(' ')) { + caption = `"${caption}"`; + } + // executing https://github.com/thlorenz/brace/blob/3a00c5d59777f9d826841178e1eb36694177f5e6/ext/language_tools.js#L1448 editor.completer.insertMatch( - `${data.caption}${ - ['function', 'schema'].includes(data.meta) ? '' : ' ' - }`, + `${caption}${['function', 'schema'].includes(data.meta) ? 
'' : ' '}`, ); }, }; diff --git a/superset-frontend/src/SqlLab/components/QueryTable/index.tsx b/superset-frontend/src/SqlLab/components/QueryTable/index.tsx index dffb65a1f1072..a50779d6eb9c1 100644 --- a/superset-frontend/src/SqlLab/components/QueryTable/index.tsx +++ b/superset-frontend/src/SqlLab/components/QueryTable/index.tsx @@ -254,6 +254,8 @@ const QueryTable = ({ responsive /> ); + } else { + q.results = <>; } q.progress = diff --git a/superset-frontend/src/SqlLab/components/SqlEditor/SqlEditor.test.jsx b/superset-frontend/src/SqlLab/components/SqlEditor/SqlEditor.test.jsx index f3549b547f8b1..d946c675cc8c4 100644 --- a/superset-frontend/src/SqlLab/components/SqlEditor/SqlEditor.test.jsx +++ b/superset-frontend/src/SqlLab/components/SqlEditor/SqlEditor.test.jsx @@ -38,6 +38,7 @@ import { queryEditorSetSelectedText, queryEditorSetSchemaOptions, } from 'src/SqlLab/actions/sqlLab'; +import { EmptyStateBig } from 'src/components/EmptyState'; import waitForComponentToPaint from 'spec/helpers/waitForComponentToPaint'; import { initialState, queries, table } from 'src/SqlLab/fixtures'; @@ -57,7 +58,19 @@ describe('SqlEditor', () => { queryEditorSetSchemaOptions, addDangerToast: jest.fn(), }, - database: {}, + database: { + allow_ctas: false, + allow_cvas: false, + allow_dml: false, + allow_file_upload: false, + allow_multi_schema_metadata_fetch: false, + allow_run_async: false, + backend: 'postgresql', + database_name: 'examples', + expose_in_sqllab: true, + force_ctas_schema: null, + id: 1, + }, queryEditorId: initialState.sqlLab.queryEditors[0].id, latestQuery: queries[0], tables: [table], @@ -80,6 +93,12 @@ describe('SqlEditor', () => { }, ); + it('does not render SqlEditor if no db selected', () => { + const database = {}; + const updatedProps = { ...mockedProps, database }; + const wrapper = buildWrapper(updatedProps); + expect(wrapper.find(EmptyStateBig)).toExist(); + }); it('render a SqlEditorLeftBar', async () => { const wrapper = buildWrapper(); await waitForComponentToPaint(wrapper); diff --git a/superset-frontend/src/SqlLab/components/SqlEditor/index.jsx b/superset-frontend/src/SqlLab/components/SqlEditor/index.jsx index 7899cbf71908a..df1a9a77c57a6 100644 --- a/superset-frontend/src/SqlLab/components/SqlEditor/index.jsx +++ b/superset-frontend/src/SqlLab/components/SqlEditor/index.jsx @@ -66,6 +66,8 @@ import { setItem, } from 'src/utils/localStorageHelpers'; import { FeatureFlag, isFeatureEnabled } from 'src/featureFlags'; +import { EmptyStateBig } from 'src/components/EmptyState'; +import { isEmpty } from 'lodash'; import TemplateParamsEditor from '../TemplateParamsEditor'; import ConnectedSouthPane from '../SouthPane/state'; import SaveQuery from '../SaveQuery'; @@ -75,6 +77,7 @@ import ShareSqlLabQuery from '../ShareSqlLabQuery'; import SqlEditorLeftBar from '../SqlEditorLeftBar'; import AceEditorWrapper from '../AceEditorWrapper'; import RunQueryActionButton from '../RunQueryActionButton'; +import { newQueryTabName } from '../../utils/newQueryTabName'; const LIMIT_DROPDOWN = [10, 100, 1000, 10000, 100000]; const SQL_EDITOR_PADDING = 10; @@ -179,6 +182,7 @@ class SqlEditor extends React.PureComponent { ), showCreateAsModal: false, createAs: '', + showEmptyState: false, }; this.sqlEditorRef = React.createRef(); this.northPaneRef = React.createRef(); @@ -188,6 +192,7 @@ class SqlEditor extends React.PureComponent { this.onResizeEnd = this.onResizeEnd.bind(this); this.canValidateQuery = this.canValidateQuery.bind(this); this.runQuery = this.runQuery.bind(this); + 
this.setEmptyState = this.setEmptyState.bind(this); this.stopQuery = this.stopQuery.bind(this); this.saveQuery = this.saveQuery.bind(this); this.onSqlChanged = this.onSqlChanged.bind(this); @@ -227,7 +232,11 @@ class SqlEditor extends React.PureComponent { // We need to measure the height of the sql editor post render to figure the height of // the south pane so it gets rendered properly // eslint-disable-next-line react/no-did-mount-set-state + const db = this.props.database; this.setState({ height: this.getSqlEditorHeight() }); + if (!db || isEmpty(db)) { + this.setEmptyState(true); + } window.addEventListener('resize', this.handleWindowResize); window.addEventListener('beforeunload', this.onBeforeUnload); @@ -239,6 +248,12 @@ class SqlEditor extends React.PureComponent { }); } + componentDidUpdate() { + if (this.props.queryEditor.sql !== this.state.sql) { + this.onSqlChanged(this.props.queryEditor.sql); + } + } + componentWillUnmount() { window.removeEventListener('resize', this.handleWindowResize); window.removeEventListener('beforeunload', this.onBeforeUnload); @@ -333,10 +348,10 @@ class SqlEditor extends React.PureComponent { key: userOS === 'Windows' ? 'ctrl+q' : 'ctrl+t', descr: t('New tab'), func: () => { + const title = newQueryTabName(this.props.queryEditors || []); this.props.addQueryEditor({ ...this.props.queryEditor, - title: t('Untitled query'), - sql: '', + title, }); }, }, @@ -362,6 +377,10 @@ class SqlEditor extends React.PureComponent { return base; } + setEmptyState(bool) { + this.setState({ showEmptyState: bool }); + } + setQueryEditorSql(sql) { this.props.queryEditorSetSql(this.props.queryEditor, sql); } @@ -753,10 +772,21 @@ class SqlEditor extends React.PureComponent { queryEditor={this.props.queryEditor} tables={this.props.tables} actions={this.props.actions} + setEmptyState={this.setEmptyState} />
- {this.queryPane()} + {this.state.showEmptyState ? ( + + ) : ( + this.queryPane() + )} editor.id === props.queryEditorId, ); - return { sqlLab, ...props, queryEditor }; + return { sqlLab, ...props, queryEditor, queryEditors: sqlLab.queryEditors }; } function mapDispatchToProps(dispatch) { diff --git a/superset-frontend/src/SqlLab/components/SqlEditorLeftBar/index.tsx b/superset-frontend/src/SqlLab/components/SqlEditorLeftBar/index.tsx index a50e3a3f62437..f74249465456a 100644 --- a/superset-frontend/src/SqlLab/components/SqlEditorLeftBar/index.tsx +++ b/superset-frontend/src/SqlLab/components/SqlEditorLeftBar/index.tsx @@ -16,7 +16,15 @@ * specific language governing permissions and limitations * under the License. */ -import React, { useEffect, useRef, useCallback, useMemo } from 'react'; +import React, { + useEffect, + useRef, + useCallback, + useMemo, + useState, + Dispatch, + SetStateAction, +} from 'react'; import Button from 'src/components/Button'; import { t, styled, css, SupersetTheme } from '@superset-ui/core'; import Collapse from 'src/components/Collapse'; @@ -25,6 +33,7 @@ import { TableSelectorMultiple } from 'src/components/TableSelector'; import { IconTooltip } from 'src/components/IconTooltip'; import { QueryEditor } from 'src/SqlLab/types'; import { DatabaseObject } from 'src/components/DatabaseSelector'; +import { EmptyStateSmall } from 'src/components/EmptyState'; import TableElement, { Table, TableElementProps } from '../TableElement'; interface ExtendedTable extends Table { @@ -54,6 +63,8 @@ interface SqlEditorLeftBarProps { tables?: ExtendedTable[]; actions: actionsTypes & TableElementProps['actions']; database: DatabaseObject; + setEmptyState: Dispatch>; + showDisabled: boolean; } const StyledScrollbarContainer = styled.div` @@ -88,15 +99,23 @@ export default function SqlEditorLeftBar({ queryEditor, tables = [], height = 500, + setEmptyState, }: SqlEditorLeftBarProps) { // Ref needed to avoid infinite rerenders on handlers // that require and modify the queryEditor const queryEditorRef = useRef(queryEditor); + const [emptyResultsWithSearch, setEmptyResultsWithSearch] = useState(false); + useEffect(() => { queryEditorRef.current = queryEditor; }, [queryEditor]); + const onEmptyResults = (searchText?: string) => { + setEmptyResultsWithSearch(!!searchText); + }; + const onDbChange = ({ id: dbId }: { id: number }) => { + setEmptyState(false); actions.queryEditorSetDb(queryEditor, dbId); actions.queryEditorSetFunctionNames(queryEditor, dbId); }; @@ -164,6 +183,22 @@ export default function SqlEditorLeftBar({ const shouldShowReset = window.location.search === '?reset=1'; const tableMetaDataHeight = height - 130; // 130 is the height of the selects above + const emptyStateComponent = ( + + {t('Manage your databases')}{' '} + {t('here')} +

+ } + /> + ); const handleSchemaChange = useCallback( (schema: string) => { if (queryEditorRef.current) { @@ -185,6 +220,8 @@ export default function SqlEditorLeftBar({ return (
0) { - const untitledQueryNumbers = this.props.queryEditors - .filter(x => x.title.match(/^Untitled Query (\d+)$/)) - .map(x => x.title.replace('Untitled Query ', '')); - if (untitledQueryNumbers.length > 0) { - // When there are query tabs open, and at least one is called "Untitled Query #" - // Where # is a valid number - const largestNumber = Math.max.apply(null, untitledQueryNumbers); - newTitle = t('Untitled Query %s', largestNumber + 1); - } - } + const newTitle = newQueryTabName(this.props.queryEditors || []); const qe = { title: newTitle, diff --git a/superset-frontend/src/SqlLab/types.ts b/superset-frontend/src/SqlLab/types.ts index 6693089574602..e1714791638c8 100644 --- a/superset-frontend/src/SqlLab/types.ts +++ b/superset-frontend/src/SqlLab/types.ts @@ -114,6 +114,7 @@ export type RootState = { activeSouthPaneTab: string | number; // default is string; action.newQuery.id is number alerts: any[]; databases: Record; + dbConnect: boolean; offline: boolean; queries: Query[]; queryEditors: QueryEditor[]; diff --git a/superset-frontend/src/SqlLab/utils/newQueryTabName.test.ts b/superset-frontend/src/SqlLab/utils/newQueryTabName.test.ts new file mode 100644 index 0000000000000..d0d98c3cd5e29 --- /dev/null +++ b/superset-frontend/src/SqlLab/utils/newQueryTabName.test.ts @@ -0,0 +1,46 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { newQueryTabName } from './newQueryTabName'; + +const emptyEditor = { + title: '', + schema: '', + autorun: false, + sql: '', + remoteId: null, +}; + +describe('newQueryTabName', () => { + it("should return default title if queryEditor's length is 0", () => { + const defaultTitle = 'default title'; + const title = newQueryTabName([], defaultTitle); + expect(title).toEqual(defaultTitle); + }); + it('should return next available number if there are unsaved editors', () => { + const untitledQueryText = 'Untitled Query'; + const unsavedEditors = [ + { ...emptyEditor, title: `${untitledQueryText} 1` }, + { ...emptyEditor, title: `${untitledQueryText} 2` }, + ]; + + const nextTitle = newQueryTabName(unsavedEditors); + expect(nextTitle).toEqual(`${untitledQueryText} 3`); + }); +}); diff --git a/superset-frontend/src/SqlLab/utils/newQueryTabName.ts b/superset-frontend/src/SqlLab/utils/newQueryTabName.ts new file mode 100644 index 0000000000000..a719a74af59af --- /dev/null +++ b/superset-frontend/src/SqlLab/utils/newQueryTabName.ts @@ -0,0 +1,49 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { t } from '@superset-ui/core'; +import { QueryEditor } from '../types'; + +const untitledQueryRegex = /^Untitled Query (\d+)$/; // Literal notation isn't recompiled +const untitledQuery = 'Untitled Query '; + +export const newQueryTabName = ( + queryEditors: QueryEditor[], + initialTitle = `${untitledQuery}1`, +): string => { + const resultTitle = t(initialTitle); + + if (queryEditors.length > 0) { + const mappedUntitled = queryEditors.filter(qe => + qe.title.match(untitledQueryRegex), + ); + const untitledQueryNumbers = mappedUntitled.map( + qe => +qe.title.replace(untitledQuery, ''), + ); + if (untitledQueryNumbers.length > 0) { + // When there are query tabs open, and at least one is called "Untitled Query #" + // Where # is a valid number + const largestNumber: number = Math.max(...untitledQueryNumbers); + return t(`${untitledQuery}%s`, largestNumber + 1); + } + return resultTitle; + } + + return resultTitle; +}; diff --git a/superset-frontend/src/assets/images/vector.svg b/superset-frontend/src/assets/images/vector.svg new file mode 100644 index 0000000000000..0bf9c39c6ccb0 --- /dev/null +++ b/superset-frontend/src/assets/images/vector.svg @@ -0,0 +1,21 @@ + + + + diff --git a/superset-frontend/src/assets/stylesheets/less/variables.less b/superset-frontend/src/assets/stylesheets/less/variables.less index 3f4fad5572708..e997f5fb78c96 100644 --- a/superset-frontend/src/assets/stylesheets/less/variables.less +++ b/superset-frontend/src/assets/stylesheets/less/variables.less @@ -48,6 +48,7 @@ @almost-black: #263238; @gray-dark: #484848; @gray-light: #e0e0e0; +@gray-light5: #666666; @gray: #879399; @gray-bg: #f7f7f7; @gray-heading: #a3a3a3; diff --git a/superset-frontend/src/components/Chart/Chart.jsx b/superset-frontend/src/components/Chart/Chart.jsx index 35209bb94af0a..7df33d0c5d7cb 100644 --- a/superset-frontend/src/components/Chart/Chart.jsx +++ b/superset-frontend/src/components/Chart/Chart.jsx @@ -22,7 +22,6 @@ import { styled, logging, t, ensureIsArray } from '@superset-ui/core'; import { isFeatureEnabled, FeatureFlag } from 'src/featureFlags'; import { PLACEHOLDER_DATASOURCE } from 'src/dashboard/constants'; -import Button from 'src/components/Button'; import Loading from 'src/components/Loading'; import { EmptyStateBig } from 'src/components/EmptyState'; import ErrorBoundary from 'src/components/ErrorBoundary'; @@ -32,6 +31,7 @@ import { getUrlParam } from 'src/utils/urlUtils'; import { ResourceStatus } from 'src/hooks/apiResources/apiResources'; import ChartRenderer from './ChartRenderer'; import { ChartErrorMessage } from './ChartErrorMessage'; +import { getChartRequiredFieldsMissingMessage } from '../../utils/getChartRequiredFieldsMissingMessage'; const propTypes = { annotationData: PropTypes.object, @@ -64,7 +64,7 @@ const propTypes = { chartStackTrace: PropTypes.string, queriesResponse: PropTypes.arrayOf(PropTypes.object), triggerQuery: PropTypes.bool, - refreshOverlayVisible: 
PropTypes.bool, + chartIsStale: PropTypes.bool, errorMessage: PropTypes.node, // dashboard callbacks addFilter: PropTypes.func, @@ -108,20 +108,8 @@ const Styles = styled.div` } `; -const RefreshOverlayWrapper = styled.div` - position: absolute; - top: 0; - left: 0; - width: 100%; - height: 100%; - display: flex; - align-items: center; - justify-content: center; -`; - const MonospaceDiv = styled.div` font-family: ${({ theme }) => theme.typography.families.monospace}; - white-space: pre; word-break: break-word; overflow-x: auto; white-space: pre-wrap; @@ -255,34 +243,23 @@ class Chart extends React.PureComponent { chartAlert, chartStatus, errorMessage, - onQuery, - refreshOverlayVisible, + chartIsStale, queriesResponse = [], isDeactivatedViz = false, width, } = this.props; const isLoading = chartStatus === 'loading'; - const isFaded = refreshOverlayVisible && !errorMessage; this.renderContainerStartTime = Logger.getTimestamp(); if (chartStatus === 'failed') { return queriesResponse.map(item => this.renderErrorMessage(item)); } - if (errorMessage) { - const description = isFeatureEnabled( - FeatureFlag.ENABLE_EXPLORE_DRAG_AND_DROP, - ) - ? t( - 'Drag and drop values into highlighted field(s) on the left control panel and run query', - ) - : t( - 'Select values in highlighted field(s) on the left control panel and run query', - ); + if (errorMessage && ensureIsArray(queriesResponse).length === 0) { return ( ); @@ -291,15 +268,24 @@ class Chart extends React.PureComponent { if ( !isLoading && !chartAlert && - isFaded && + !errorMessage && + chartIsStale && ensureIsArray(queriesResponse).length === 0 ) { return ( + {t( + 'Click on "Create chart" button in the control panel on the left to preview a visualization or', + )}{' '} + + {t('click here')} + + . + + } image="chart.svg" /> ); @@ -317,25 +303,13 @@ class Chart extends React.PureComponent { height={height} width={width} > -
+
- - {!isLoading && !chartAlert && isFaded && ( - - - - )} - {isLoading && !isDeactivatedViz && } diff --git a/superset-frontend/src/components/Chart/ChartRenderer.jsx b/superset-frontend/src/components/Chart/ChartRenderer.jsx index b814b6fde6d36..45feb6ffd57ee 100644 --- a/superset-frontend/src/components/Chart/ChartRenderer.jsx +++ b/superset-frontend/src/components/Chart/ChartRenderer.jsx @@ -30,6 +30,7 @@ const propTypes = { datasource: PropTypes.object, initialValues: PropTypes.object, formData: PropTypes.object.isRequired, + latestQueryFormData: PropTypes.object, labelColors: PropTypes.object, sharedLabelColors: PropTypes.object, height: PropTypes.number, @@ -42,7 +43,7 @@ const propTypes = { chartStatus: PropTypes.string, queriesResponse: PropTypes.arrayOf(PropTypes.object), triggerQuery: PropTypes.bool, - refreshOverlayVisible: PropTypes.bool, + chartIsStale: PropTypes.bool, // dashboard callbacks addFilter: PropTypes.func, setDataMask: PropTypes.func, @@ -58,6 +59,8 @@ const BLANK = {}; const BIG_NO_RESULT_MIN_WIDTH = 300; const BIG_NO_RESULT_MIN_HEIGHT = 220; +const behaviors = [Behavior.INTERACTIVE_CHART]; + const defaultProps = { addFilter: () => BLANK, onFilterMenuOpen: () => BLANK, @@ -93,8 +96,7 @@ class ChartRenderer extends React.Component { const resultsReady = nextProps.queriesResponse && ['success', 'rendered'].indexOf(nextProps.chartStatus) > -1 && - !nextProps.queriesResponse?.[0]?.error && - !nextProps.refreshOverlayVisible; + !nextProps.queriesResponse?.[0]?.error; if (resultsReady) { this.hasQueryResponseChange = @@ -170,16 +172,10 @@ class ChartRenderer extends React.Component { } render() { - const { chartAlert, chartStatus, vizType, chartId, refreshOverlayVisible } = - this.props; + const { chartAlert, chartStatus, chartId } = this.props; // Skip chart rendering - if ( - refreshOverlayVisible || - chartStatus === 'loading' || - !!chartAlert || - chartStatus === null - ) { + if (chartStatus === 'loading' || !!chartAlert || chartStatus === null) { return null; } @@ -193,11 +189,17 @@ class ChartRenderer extends React.Component { initialValues, ownState, filterState, + chartIsStale, formData, + latestQueryFormData, queriesResponse, postTransformProps, } = this.props; + const currentFormData = + chartIsStale && latestQueryFormData ? latestQueryFormData : formData; + const vizType = currentFormData.viz_type || this.props.vizType; + // It's bad practice to use unprefixed `vizType` as classnames for chart // container. It may cause css conflicts as in the case of legacy table chart. 
// When migrating charts, we should gradually add a `superset-chart-` prefix @@ -255,11 +257,11 @@ class ChartRenderer extends React.Component { annotationData={annotationData} datasource={datasource} initialValues={initialValues} - formData={formData} + formData={currentFormData} ownState={ownState} filterState={filterState} hooks={this.hooks} - behaviors={[Behavior.INTERACTIVE_CHART]} + behaviors={behaviors} queriesData={queriesResponse} onRenderSuccess={this.handleRenderSuccess} onRenderFailure={this.handleRenderFailure} diff --git a/superset-frontend/src/components/Chart/ChartRenderer.test.jsx b/superset-frontend/src/components/Chart/ChartRenderer.test.jsx index 7e3a455631ff0..f3ce0415175fb 100644 --- a/superset-frontend/src/components/Chart/ChartRenderer.test.jsx +++ b/superset-frontend/src/components/Chart/ChartRenderer.test.jsx @@ -25,22 +25,25 @@ import ChartRenderer from 'src/components/Chart/ChartRenderer'; const requiredProps = { chartId: 1, datasource: {}, - formData: {}, - vizType: 'foo', + formData: { testControl: 'foo' }, + latestQueryFormData: { + testControl: 'bar', + }, + vizType: 'table', }; describe('ChartRenderer', () => { it('should render SuperChart', () => { const wrapper = shallow( - , + , ); expect(wrapper.find(SuperChart)).toExist(); }); - it('should not render SuperChart when refreshOverlayVisible is true', () => { - const wrapper = shallow( - , - ); - expect(wrapper.find(SuperChart)).not.toExist(); + it('should use latestQueryFormData instead of formData when chartIsStale is true', () => { + const wrapper = shallow(); + expect(wrapper.find(SuperChart).prop('formData')).toEqual({ + testControl: 'bar', + }); }); }); diff --git a/superset-frontend/src/components/DatabaseSelector/DatabaseSelector.test.tsx b/superset-frontend/src/components/DatabaseSelector/DatabaseSelector.test.tsx index 2387c2e2517fe..272249b549600 100644 --- a/superset-frontend/src/components/DatabaseSelector/DatabaseSelector.test.tsx +++ b/superset-frontend/src/components/DatabaseSelector/DatabaseSelector.test.tsx @@ -21,11 +21,12 @@ import React from 'react'; import { render, screen, waitFor } from 'spec/helpers/testing-library'; import { SupersetClient } from '@superset-ui/core'; import userEvent from '@testing-library/user-event'; -import DatabaseSelector from '.'; +import DatabaseSelector, { DatabaseSelectorProps } from '.'; +import { EmptyStateSmall } from '../EmptyState'; const SupersetClientGet = jest.spyOn(SupersetClient, 'get'); -const createProps = () => ({ +const createProps = (): DatabaseSelectorProps => ({ db: { id: 1, database_name: 'test', @@ -38,12 +39,10 @@ const createProps = () => ({ schema: undefined, sqlLabMode: true, getDbList: jest.fn(), - getTableList: jest.fn(), handleError: jest.fn(), onDbChange: jest.fn(), onSchemaChange: jest.fn(), onSchemasLoad: jest.fn(), - onUpdate: jest.fn(), }); beforeEach(() => { @@ -191,12 +190,10 @@ test('Refresh should work', async () => { await waitFor(() => { expect(SupersetClientGet).toBeCalledTimes(2); expect(props.getDbList).toBeCalledTimes(0); - expect(props.getTableList).toBeCalledTimes(0); expect(props.handleError).toBeCalledTimes(0); expect(props.onDbChange).toBeCalledTimes(0); expect(props.onSchemaChange).toBeCalledTimes(0); expect(props.onSchemasLoad).toBeCalledTimes(0); - expect(props.onUpdate).toBeCalledTimes(0); }); userEvent.click(screen.getByRole('button', { name: 'refresh' })); @@ -204,12 +201,10 @@ test('Refresh should work', async () => { await waitFor(() => { expect(SupersetClientGet).toBeCalledTimes(3); 
expect(props.getDbList).toBeCalledTimes(1); - expect(props.getTableList).toBeCalledTimes(0); expect(props.handleError).toBeCalledTimes(0); expect(props.onDbChange).toBeCalledTimes(0); expect(props.onSchemaChange).toBeCalledTimes(0); expect(props.onSchemasLoad).toBeCalledTimes(2); - expect(props.onUpdate).toBeCalledTimes(0); }); }); @@ -224,6 +219,28 @@ test('Should database select display options', async () => { expect(await screen.findByText('test-mysql')).toBeInTheDocument(); }); +test('should show empty state if there are no options', async () => { + SupersetClientGet.mockImplementation( + async () => ({ json: { result: [] } } as any), + ); + const props = createProps(); + render( + } + />, + { useRedux: true }, + ); + const select = screen.getByRole('combobox', { + name: 'Select database or type database name', + }); + userEvent.click(select); + const emptystate = await screen.findByText('empty'); + expect(emptystate).toBeInTheDocument(); + expect(screen.queryByText('test-mysql')).not.toBeInTheDocument(); +}); + test('Should schema select display options', async () => { const props = createProps(); render(, { useRedux: true }); diff --git a/superset-frontend/src/components/DatabaseSelector/index.tsx b/superset-frontend/src/components/DatabaseSelector/index.tsx index 531a7a9e7194c..718177a13956f 100644 --- a/superset-frontend/src/components/DatabaseSelector/index.tsx +++ b/superset-frontend/src/components/DatabaseSelector/index.tsx @@ -86,13 +86,15 @@ export type DatabaseObject = { type SchemaValue = { label: string; value: string }; -interface DatabaseSelectorProps { +export interface DatabaseSelectorProps { db?: DatabaseObject; + emptyState?: ReactNode; formMode?: boolean; getDbList?: (arg0: any) => {}; handleError: (msg: string) => void; isDatabaseSelectEnabled?: boolean; onDbChange?: (db: DatabaseObject) => void; + onEmptyResults?: (searchText?: string) => void; onSchemaChange?: (schema?: string) => void; onSchemasLoad?: (schemas: Array) => void; readOnly?: boolean; @@ -118,10 +120,12 @@ const SelectLabel = ({ export default function DatabaseSelector({ db, formMode = false, + emptyState, getDbList, handleError, isDatabaseSelectEnabled = true, onDbChange, + onEmptyResults, onSchemaChange, onSchemasLoad, readOnly = false, @@ -146,6 +150,7 @@ export default function DatabaseSelector({ ); const [refresh, setRefresh] = useState(0); const { addSuccessToast } = useToasts(); + const loadDatabases = useMemo( () => async ( @@ -181,7 +186,7 @@ export default function DatabaseSelector({ getDbList(result); } if (result.length === 0) { - handleError(t("It seems you don't have access to any database")); + if (onEmptyResults) onEmptyResults(search); } const options = result.map((row: DatabaseObject) => ({ label: ( @@ -197,13 +202,14 @@ export default function DatabaseSelector({ allow_multi_schema_metadata_fetch: row.allow_multi_schema_metadata_fetch, })); + return { data: options, totalCount: options.length, }; }); }, - [formMode, getDbList, handleError, sqlLabMode], + [formMode, getDbList, sqlLabMode], ); useEffect(() => { @@ -272,6 +278,7 @@ export default function DatabaseSelector({ data-test="select-database" header={{t('Database')}} lazyLoading={false} + notFoundContent={emptyState} onChange={changeDataBase} value={currentDb} placeholder={t('Select database or type database name')} @@ -289,11 +296,10 @@ export default function DatabaseSelector({ tooltipContent={t('Force refresh schema list')} /> ); - return renderSelectRow( } placeholder={t('Search')} onChange={(event: any) => { const 
filterText = event.target.value; debouncedChangeHandler(filterText); }} + css={css` + width: 200px; + margin-right: ${theme.gridUnit * 2}px; + `} /> ); }; @@ -250,7 +265,9 @@ export const useFilteredTableData = ( const rowsAsStrings = useMemo( () => data?.map((row: Record) => - Object.values(row).map(value => value?.toString().toLowerCase()), + Object.values(row).map(value => + value ? value.toString().toLowerCase() : t('N/A'), + ), ) ?? [], [data], ); diff --git a/superset-frontend/src/explore/components/DataTablesPane/DataTablesPane.test.tsx b/superset-frontend/src/explore/components/DataTablesPane/DataTablesPane.test.tsx index 9905d8f5c6d3c..786150449ee20 100644 --- a/superset-frontend/src/explore/components/DataTablesPane/DataTablesPane.test.tsx +++ b/superset-frontend/src/explore/components/DataTablesPane/DataTablesPane.test.tsx @@ -21,7 +21,11 @@ import React from 'react'; import userEvent from '@testing-library/user-event'; import fetchMock from 'fetch-mock'; import * as copyUtils from 'src/utils/copy'; -import { render, screen } from 'spec/helpers/testing-library'; +import { + render, + screen, + waitForElementToBeRemoved, +} from 'spec/helpers/testing-library'; import { DataTablesPane } from '.'; const createProps = () => ({ @@ -50,7 +54,6 @@ const createProps = () => ({ sort_y_axis: 'alpha_asc', extra_form_data: {}, }, - tableSectionHeight: 156.9, chartStatus: 'rendered', onCollapseChange: jest.fn(), queriesResponse: [ @@ -60,91 +63,162 @@ const createProps = () => ({ ], }); -test('Rendering DataTablesPane correctly', () => { - const props = createProps(); - render(, { useRedux: true }); - expect(screen.getByTestId('some-purposeful-instance')).toBeVisible(); - expect(screen.getByRole('tablist')).toBeVisible(); - expect(screen.getByRole('tab', { name: 'right Data' })).toBeVisible(); - expect(screen.getByRole('img', { name: 'right' })).toBeVisible(); -}); +describe('DataTablesPane', () => { + // Collapsed/expanded state depends on local storage + // We need to clear it manually - otherwise initial state would depend on the order of tests + beforeEach(() => { + localStorage.clear(); + }); -test('Should show tabs', async () => { - const props = createProps(); - render(, { useRedux: true }); - expect(screen.queryByText('View results')).not.toBeInTheDocument(); - expect(screen.queryByText('View samples')).not.toBeInTheDocument(); - userEvent.click(await screen.findByText('Data')); - expect(await screen.findByText('View results')).toBeVisible(); - expect(screen.getByText('View samples')).toBeVisible(); -}); + afterAll(() => { + localStorage.clear(); + }); -test('Should show tabs: View results', async () => { - const props = createProps(); - render(, { - useRedux: true, + test('Rendering DataTablesPane correctly', () => { + const props = createProps(); + render(, { useRedux: true }); + expect(screen.getByText('Results')).toBeVisible(); + expect(screen.getByText('Samples')).toBeVisible(); + expect(screen.getByLabelText('Expand data panel')).toBeVisible(); }); - userEvent.click(await screen.findByText('Data')); - userEvent.click(await screen.findByText('View results')); - expect(screen.getByText('0 rows retrieved')).toBeVisible(); -}); -test('Should show tabs: View samples', async () => { - const props = createProps(); - render(, { - useRedux: true, + test('Collapse/Expand buttons', async () => { + const props = createProps(); + render(, { + useRedux: true, + }); + expect( + screen.queryByLabelText('Collapse data panel'), + ).not.toBeInTheDocument(); + 
userEvent.click(screen.getByLabelText('Expand data panel')); + expect(await screen.findByLabelText('Collapse data panel')).toBeVisible(); + expect( + screen.queryByLabelText('Expand data panel'), + ).not.toBeInTheDocument(); }); - userEvent.click(await screen.findByText('Data')); - expect(screen.queryByText('0 rows retrieved')).not.toBeInTheDocument(); - userEvent.click(await screen.findByText('View samples')); - expect(await screen.findByText('0 rows retrieved')).toBeVisible(); -}); -test('Should copy data table content correctly', async () => { - fetchMock.post( - 'glob:*/api/v1/chart/data?form_data=%7B%22slice_id%22%3A456%7D', - { - result: [ - { - data: [{ __timestamp: 1230768000000, genre: 'Action' }], - colnames: ['__timestamp', 'genre'], - coltypes: [2, 1], + test('Should show tabs: View results', async () => { + const props = createProps(); + render(, { + useRedux: true, + }); + userEvent.click(screen.getByText('Results')); + expect(await screen.findByText('0 rows retrieved')).toBeVisible(); + expect(await screen.findByLabelText('Collapse data panel')).toBeVisible(); + localStorage.clear(); + }); + + test('Should show tabs: View samples', async () => { + const props = createProps(); + render(, { + useRedux: true, + }); + userEvent.click(screen.getByText('Samples')); + expect(await screen.findByText('0 rows retrieved')).toBeVisible(); + expect(await screen.findByLabelText('Collapse data panel')).toBeVisible(); + }); + + test('Should copy data table content correctly', async () => { + fetchMock.post( + 'glob:*/api/v1/chart/data?form_data=%7B%22slice_id%22%3A456%7D', + { + result: [ + { + data: [{ __timestamp: 1230768000000, genre: 'Action' }], + colnames: ['__timestamp', 'genre'], + coltypes: [2, 1], + }, + ], + }, + ); + const copyToClipboardSpy = jest.spyOn(copyUtils, 'default'); + const props = createProps(); + render( + , + { + useRedux: true, + initialState: { + explore: { + timeFormattedColumns: { + '34__table': ['__timestamp'], + }, + }, }, - ], - }, - ); - const copyToClipboardSpy = jest.spyOn(copyUtils, 'default'); - const props = createProps(); - render( - { + fetchMock.post( + 'glob:*/api/v1/chart/data?form_data=%7B%22slice_id%22%3A456%7D', + { + result: [ { + data: [ + { __timestamp: 1230768000000, genre: 'Action' }, + { __timestamp: 1230768000010, genre: 'Horror' }, + ], colnames: ['__timestamp', 'genre'], coltypes: [2, 1], }, ], - }} - />, - { - useRedux: true, - initialState: { - explore: { - timeFormattedColumns: { - '34__table': ['__timestamp'], + }, + ); + const props = createProps(); + render( + , + { + useRedux: true, + initialState: { + explore: { + timeFormattedColumns: { + '34__table': ['__timestamp'], + }, }, }, }, - }, - ); - userEvent.click(await screen.findByText('Data')); - expect(await screen.findByText('1 rows retrieved')).toBeVisible(); + ); + userEvent.click(screen.getByText('Results')); + expect(await screen.findByText('2 rows retrieved')).toBeVisible(); + expect(screen.getByText('Action')).toBeVisible(); + expect(screen.getByText('Horror')).toBeVisible(); - userEvent.click(screen.getByRole('button', { name: 'Copy' })); - expect(copyToClipboardSpy).toHaveBeenCalledWith( - '2009-01-01 00:00:00\tAction\n', - ); - fetchMock.done(); + userEvent.type(screen.getByPlaceholderText('Search'), 'hor'); + + await waitForElementToBeRemoved(() => screen.queryByText('Action')); + expect(screen.getByText('Horror')).toBeVisible(); + expect(screen.queryByText('Action')).not.toBeInTheDocument(); + fetchMock.restore(); + }); }); diff --git 
a/superset-frontend/src/explore/components/DataTablesPane/index.tsx b/superset-frontend/src/explore/components/DataTablesPane/index.tsx index 5d935caa63ddd..a41af3626f1e4 100644 --- a/superset-frontend/src/explore/components/DataTablesPane/index.tsx +++ b/superset-frontend/src/explore/components/DataTablesPane/index.tsx @@ -16,15 +16,23 @@ * specific language governing permissions and limitations * under the License. */ -import React, { useCallback, useEffect, useMemo, useState } from 'react'; +import React, { + useCallback, + useEffect, + useMemo, + useState, + MouseEvent, +} from 'react'; import { + css, ensureIsArray, GenericDataType, JsonObject, styled, t, + useTheme, } from '@superset-ui/core'; -import Collapse from 'src/components/Collapse'; +import Icons from 'src/components/Icons'; import Tabs from 'src/components/Tabs'; import Loading from 'src/components/Loading'; import { EmptyStateMedium } from 'src/components/EmptyState'; @@ -58,53 +66,58 @@ const getDefaultDataTablesState = (value: any) => ({ const DATA_TABLE_PAGE_SIZE = 50; -const DATAPANEL_KEY = 'data'; - const TableControlsWrapper = styled.div` - display: flex; - align-items: center; - - span { - flex-shrink: 0; - } + ${({ theme }) => ` + display: flex; + align-items: center; + justify-content: space-between; + margin-bottom: ${theme.gridUnit * 2}px; + + span { + flex-shrink: 0; + } + `} `; const SouthPane = styled.div` - position: relative; - background-color: ${({ theme }) => theme.colors.grayscale.light5}; - z-index: 5; - overflow: hidden; -`; - -const TabsWrapper = styled.div<{ contentHeight: number }>` - height: ${({ contentHeight }) => contentHeight}px; - overflow: hidden; + ${({ theme }) => ` + position: relative; + background-color: ${theme.colors.grayscale.light5}; + z-index: 5; + overflow: hidden; - .table-condensed { - height: 100%; - overflow: auto; - } -`; + .ant-tabs { + height: 100%; + } -const CollapseWrapper = styled.div` - height: 100%; + .ant-tabs-content-holder { + height: 100%; + } - .collapse-inner { - height: 100%; + .ant-tabs-content { + height: 100%; + } - .ant-collapse-item { + .ant-tabs-tabpane { + display: flex; + flex-direction: column; height: 100%; - .ant-collapse-content { - height: calc(100% - ${({ theme }) => theme.gridUnit * 8}px); + .table-condensed { + height: 100%; + overflow: auto; + margin-bottom: ${theme.gridUnit * 4}px; - .ant-collapse-content-box { - padding-top: 0; - height: 100%; + .table { + margin-bottom: ${theme.gridUnit * 2}px; } } + + .pagination-container > ul[role='navigation'] { + margin-top: 0; + } } - } + `} `; const Error = styled.pre` @@ -117,7 +130,6 @@ interface DataTableProps { datasource: string | undefined; filterText: string; data: object[] | undefined; - timeFormattedColumns: string[] | undefined; isLoading: boolean; error: string | undefined; errorMessage: React.ReactElement | undefined; @@ -130,12 +142,12 @@ const DataTable = ({ datasource, filterText, data, - timeFormattedColumns, isLoading, error, errorMessage, type, }: DataTableProps) => { + const timeFormattedColumns = useTimeFormattedColumns(datasource); // this is to preserve the order of the columns, even if there are integer values, // while also only grabbing the first column's keys const columns = useTableColumns( @@ -185,9 +197,42 @@ const DataTable = ({ return null; }; +const TableControls = ({ + data, + datasourceId, + onInputChange, + columnNames, + isLoading, +}: { + data: Record[]; + datasourceId?: string; + onInputChange: (input: string) => void; + columnNames: string[]; + isLoading: 
boolean; +}) => { + const timeFormattedColumns = useTimeFormattedColumns(datasourceId); + const formattedData = useMemo( + () => applyFormattingToTabularData(data, timeFormattedColumns), + [data, timeFormattedColumns], + ); + return ( + + +
+ + +
+
+ ); +}; + export const DataTablesPane = ({ queryFormData, - tableSectionHeight, onCollapseChange, chartStatus, ownState, @@ -195,19 +240,19 @@ export const DataTablesPane = ({ queriesResponse, }: { queryFormData: Record; - tableSectionHeight: number; chartStatus: string; ownState?: JsonObject; - onCollapseChange: (openPanelName: string) => void; + onCollapseChange: (isOpen: boolean) => void; errorMessage?: JSX.Element; queriesResponse: Record; }) => { + const theme = useTheme(); const [data, setData] = useState(getDefaultDataTablesState(undefined)); const [isLoading, setIsLoading] = useState(getDefaultDataTablesState(true)); const [columnNames, setColumnNames] = useState(getDefaultDataTablesState([])); const [columnTypes, setColumnTypes] = useState(getDefaultDataTablesState([])); const [error, setError] = useState(getDefaultDataTablesState('')); - const [filterText, setFilterText] = useState(''); + const [filterText, setFilterText] = useState(getDefaultDataTablesState('')); const [activeTabKey, setActiveTabKey] = useState( RESULT_TYPES.results, ); @@ -218,24 +263,6 @@ export const DataTablesPane = ({ getItem(LocalStorageKeys.is_datapanel_open, false), ); - const timeFormattedColumns = useTimeFormattedColumns( - queryFormData?.datasource, - ); - - const formattedData = useMemo( - () => ({ - [RESULT_TYPES.results]: applyFormattingToTabularData( - data[RESULT_TYPES.results], - timeFormattedColumns, - ), - [RESULT_TYPES.samples]: applyFormattingToTabularData( - data[RESULT_TYPES.samples], - timeFormattedColumns, - ), - }), - [data, timeFormattedColumns], - ); - const getData = useCallback( (resultType: 'samples' | 'results') => { setIsLoading(prevIsLoading => ({ @@ -381,81 +408,121 @@ export const DataTablesPane = ({ errorMessage, ]); - const TableControls = ( - - - - - + const handleCollapseChange = useCallback( + (isOpen: boolean) => { + onCollapseChange(isOpen); + setPanelOpen(isOpen); + }, + [onCollapseChange], ); - const handleCollapseChange = (openPanelName: string) => { - onCollapseChange(openPanelName); - setPanelOpen(!!openPanelName); - }; + const handleTabClick = useCallback( + (tabKey: string, e: MouseEvent) => { + if (!panelOpen) { + handleCollapseChange(true); + } else if (tabKey === activeTabKey) { + e.preventDefault(); + handleCollapseChange(false); + } + setActiveTabKey(tabKey); + }, + [activeTabKey, handleCollapseChange, panelOpen], + ); + + const CollapseButton = useMemo(() => { + const caretIcon = panelOpen ? ( + + ) : ( + + ); + return ( + + {panelOpen ? ( + handleCollapseChange(false)} + > + {caretIcon} + + ) : ( + handleCollapseChange(true)} + > + {caretIcon} + + )} + + ); + }, [handleCollapseChange, panelOpen, theme.colors.grayscale.base]); return ( - - - - - - - - - - - - - - - - + + + + setFilterText(prevState => ({ + ...prevState, + [RESULT_TYPES.results]: input, + })) + } + isLoading={isLoading[RESULT_TYPES.results]} + /> + + + + + setFilterText(prevState => ({ + ...prevState, + [RESULT_TYPES.samples]: input, + })) + } + isLoading={isLoading[RESULT_TYPES.samples]} + /> + + + ); }; diff --git a/superset-frontend/src/explore/components/ExploreAdditionalActionsMenu/index.jsx b/superset-frontend/src/explore/components/ExploreAdditionalActionsMenu/index.jsx index f02ab01622593..fa9b54acf5025 100644 --- a/superset-frontend/src/explore/components/ExploreAdditionalActionsMenu/index.jsx +++ b/superset-frontend/src/explore/components/ExploreAdditionalActionsMenu/index.jsx @@ -370,31 +370,35 @@ const ExploreAdditionalActionsMenu = ({ - {canAddReports && - (report ? 
( - - - - - {t('Email reports active')} - - - - {t('Edit email report')} - - - {t('Delete email report')} + {canAddReports && ( + <> + {report ? ( + + + + + {t('Email reports active')} + + + + {t('Edit email report')} + + + {t('Delete email report')} + + + ) : ( + + {t('Set up an email report')} - - ) : ( - - {t('Set up an email report')} - - ))} - + )} + + + )} + void; + secondaryButtonAction?: (e: React.MouseEvent) => void; + primaryButtonText?: string; + secondaryButtonText?: string; + type: 'info' | 'warning'; + className?: string; +} + +const AlertContainer = styled.div` + ${({ theme }) => css` + margin: ${theme.gridUnit * 4}px; + padding: ${theme.gridUnit * 4}px; + + border: 1px solid ${theme.colors.info.base}; + background-color: ${theme.colors.info.light2}; + border-radius: 2px; + + color: ${theme.colors.info.dark2}; + font-size: ${theme.typography.sizes.m}px; + + p { + margin-bottom: ${theme.gridUnit}px; + } + + & a, + & span[role='button'] { + color: inherit; + text-decoration: underline; + &:hover { + color: ${theme.colors.info.dark1}; + } + } + + &.alert-type-warning { + border-color: ${theme.colors.alert.base}; + background-color: ${theme.colors.alert.light2}; + + p { + color: ${theme.colors.alert.dark2}; + } + + & a:hover, + & span[role='button']:hover { + color: ${theme.colors.alert.dark1}; + } + } + `} +`; + +const ButtonContainer = styled.div` + display: flex; + justify-content: flex-end; + button { + line-height: 1; + } +`; + +const Title = styled.p` + font-weight: ${({ theme }) => theme.typography.weights.bold}; +`; + +export const ExploreAlert = forwardRef( + ( + { + title, + bodyText, + primaryButtonAction, + secondaryButtonAction, + primaryButtonText, + secondaryButtonText, + type = 'info', + className = '', + }: ControlPanelAlertProps, + ref: RefObject, + ) => ( + + {title} +
<p> + {bodyText} + </p>
+ {primaryButtonText && primaryButtonAction && ( + + {secondaryButtonAction && secondaryButtonText && ( + + )} + + + )} +
+ ), +); diff --git a/superset-frontend/src/explore/components/ExploreChartPanel.jsx b/superset-frontend/src/explore/components/ExploreChartPanel.jsx index 8fb1c3ef073d9..5cd818f52ee89 100644 --- a/superset-frontend/src/explore/components/ExploreChartPanel.jsx +++ b/superset-frontend/src/explore/components/ExploreChartPanel.jsx @@ -19,7 +19,14 @@ import React, { useState, useEffect, useCallback, useMemo } from 'react'; import PropTypes from 'prop-types'; import Split from 'react-split'; -import { styled, SupersetClient, useTheme } from '@superset-ui/core'; +import { + css, + ensureIsArray, + styled, + SupersetClient, + t, + useTheme, +} from '@superset-ui/core'; import { useResizeDetector } from 'react-resize-detector'; import { chartPropShape } from 'src/dashboard/util/propShapes'; import ChartContainer from 'src/components/Chart/ChartContainer'; @@ -31,6 +38,8 @@ import { import { DataTablesPane } from './DataTablesPane'; import { buildV1ChartDataPayload } from '../exploreUtils'; import { ChartPills } from './ChartPills'; +import { ExploreAlert } from './ExploreAlert'; +import { getChartRequiredFieldsMissingMessage } from '../../utils/getChartRequiredFieldsMissingMessage'; const propTypes = { actions: PropTypes.object.isRequired, @@ -41,8 +50,6 @@ const propTypes = { dashboardId: PropTypes.number, column_formats: PropTypes.object, containerId: PropTypes.string.isRequired, - height: PropTypes.string.isRequired, - width: PropTypes.string.isRequired, isStarred: PropTypes.bool.isRequired, slice: PropTypes.object, sliceName: PropTypes.string, @@ -53,7 +60,7 @@ const propTypes = { standalone: PropTypes.number, force: PropTypes.bool, timeout: PropTypes.number, - refreshOverlayVisible: PropTypes.bool, + chartIsStale: PropTypes.bool, chart: chartPropShape, errorMessage: PropTypes.node, triggerRender: PropTypes.bool, @@ -61,11 +68,8 @@ const propTypes = { const GUTTER_SIZE_FACTOR = 1.25; -const CHART_PANEL_PADDING_HORIZ = 30; -const CHART_PANEL_PADDING_VERTICAL = 15; - -const INITIAL_SIZES = [90, 10]; -const MIN_SIZES = [300, 50]; +const INITIAL_SIZES = [100, 0]; +const MIN_SIZES = [300, 65]; const DEFAULT_SOUTH_PANE_HEIGHT_PERCENT = 40; const Styles = styled.div` @@ -109,28 +113,50 @@ const Styles = styled.div` } `; -const ExploreChartPanel = props => { +const ExploreChartPanel = ({ + chart, + slice, + vizType, + ownState, + triggerRender, + force, + datasource, + errorMessage, + form_data: formData, + onQuery, + actions, + timeout, + standalone, + chartIsStale, + chartAlert, +}) => { const theme = useTheme(); const gutterMargin = theme.gridUnit * GUTTER_SIZE_FACTOR; const gutterHeight = theme.gridUnit * GUTTER_SIZE_FACTOR; - const { width: chartPanelWidth, ref: chartPanelRef } = useResizeDetector({ + const { + width: chartPanelWidth, + height: chartPanelHeight, + ref: chartPanelRef, + } = useResizeDetector({ refreshMode: 'debounce', refreshRate: 300, }); - const { height: pillsHeight, ref: pillsRef } = useResizeDetector({ - refreshMode: 'debounce', - refreshRate: 1000, - }); const [splitSizes, setSplitSizes] = useState( getItem(LocalStorageKeys.chart_split_sizes, INITIAL_SIZES), ); - const { slice } = props; + + const showAlertBanner = + !chartAlert && + chartIsStale && + chart.chartStatus !== 'failed' && + ensureIsArray(chart.queriesResponse).length > 0; + const updateQueryContext = useCallback( async function fetchChartData() { if (slice && slice.query_context === null) { const queryContext = buildV1ChartDataPayload({ formData: slice.form_data, - force: props.force, + force, resultFormat: 
'json', resultType: 'full', setDataMask: null, @@ -154,56 +180,28 @@ const ExploreChartPanel = props => { updateQueryContext(); }, [updateQueryContext]); - const calcSectionHeight = useCallback( - percent => { - let containerHeight = parseInt(props.height, 10); - if (pillsHeight) { - containerHeight -= pillsHeight; - } - return ( - (containerHeight * percent) / 100 - (gutterHeight / 2 + gutterMargin) - ); - }, - [gutterHeight, gutterMargin, pillsHeight, props.height, props.standalone], - ); - - const [tableSectionHeight, setTableSectionHeight] = useState( - calcSectionHeight(INITIAL_SIZES[1]), - ); - - const recalcPanelSizes = useCallback( - ([, southPercent]) => { - setTableSectionHeight(calcSectionHeight(southPercent)); - }, - [calcSectionHeight], - ); - - useEffect(() => { - recalcPanelSizes(splitSizes); - }, [recalcPanelSizes, splitSizes]); - useEffect(() => { setItem(LocalStorageKeys.chart_split_sizes, splitSizes); }, [splitSizes]); - const onDragEnd = sizes => { + const onDragEnd = useCallback(sizes => { setSplitSizes(sizes); - }; + }, []); - const refreshCachedQuery = () => { - props.actions.postChartFormData( - props.form_data, + const refreshCachedQuery = useCallback(() => { + actions.postChartFormData( + formData, true, - props.timeout, - props.chart.id, + timeout, + chart.id, undefined, - props.ownState, + ownState, ); - }; + }, [actions, chart.id, formData, ownState, timeout]); - const onCollapseChange = openPanelName => { + const onCollapseChange = useCallback(isOpen => { let splitSizes; - if (!openPanelName) { + if (!isOpen) { splitSizes = INITIAL_SIZES; } else { splitSizes = [ @@ -212,68 +210,135 @@ const ExploreChartPanel = props => { ]; } setSplitSizes(splitSizes); - }; - const renderChart = useCallback(() => { - const { chart, vizType } = props; - const newHeight = - vizType === 'filter_box' - ? calcSectionHeight(100) - CHART_PANEL_PADDING_VERTICAL - : calcSectionHeight(splitSizes[0]) - CHART_PANEL_PADDING_VERTICAL; - const chartWidth = chartPanelWidth - CHART_PANEL_PADDING_HORIZ; - return ( - chartWidth > 0 && ( - - ) - ); - }, [calcSectionHeight, chartPanelWidth, props, splitSizes]); + }, []); + + const renderChart = useCallback( + () => ( +
+ {chartPanelWidth && chartPanelHeight && ( + + )} +
+ ), + [ + actions.setControlValue, + chart.annotationData, + chart.chartAlert, + chart.chartStackTrace, + chart.chartStatus, + chart.id, + chart.latestQueryFormData, + chart.queriesResponse, + chart.triggerQuery, + chartIsStale, + chartPanelHeight, + chartPanelRef, + chartPanelWidth, + datasource, + errorMessage, + force, + formData, + onQuery, + ownState, + timeout, + triggerRender, + vizType, + ], + ); const panelBody = useMemo( () => ( -
+
+ {showAlertBanner && ( + + {t( + 'You updated the values in the control panel, but the chart was not updated automatically. Run the query by clicking on the "Update chart" button or', + )}{' '} + + {t('click here')} + + . + + ) + } + type="warning" + css={theme => css` + margin: 0 0 ${theme.gridUnit * 4}px 0; + `} + /> + )} {renderChart()}
), - [chartPanelRef, renderChart], + [ + showAlertBanner, + errorMessage, + onQuery, + chart.queriesResponse, + chart.chartStatus, + chart.chartUpdateStartTime, + chart.chartUpdateEndTime, + refreshCachedQuery, + formData?.row_limit, + renderChart, + ], ); - const standaloneChartBody = useMemo( - () =>
<div ref={chartPanelRef}>{renderChart()}</div>
, - [chartPanelRef, renderChart], - ); + const standaloneChartBody = useMemo(() => renderChart(), [renderChart]); - const [queryFormData, setQueryFormData] = useState( - props.chart.latestQueryFormData, - ); + const [queryFormData, setQueryFormData] = useState(chart.latestQueryFormData); useEffect(() => { // only update when `latestQueryFormData` changes AND `triggerRender` @@ -281,13 +346,20 @@ const ExploreChartPanel = props => { // as this can trigger a query downstream based on incomplete form data. // (`latestQueryFormData` is only updated when a a valid request has been // triggered). - if (!props.triggerRender) { - setQueryFormData(props.chart.latestQueryFormData); + if (!triggerRender) { + setQueryFormData(chart.latestQueryFormData); } // eslint-disable-next-line react-hooks/exhaustive-deps - }, [props.chart.latestQueryFormData]); + }, [chart.latestQueryFormData]); - if (props.standalone) { + const elementStyle = useCallback( + (dimension, elementSize, gutterSize) => ({ + [dimension]: `calc(${elementSize}% - ${gutterSize + gutterMargin}px)`, + }), + [gutterMargin], + ); + + if (standalone) { // dom manipulation hack to get rid of the boostrap theme's body background const standaloneClass = 'background-transparent'; const bodyClasses = document.body.className.split(' '); @@ -297,13 +369,9 @@ const ExploreChartPanel = props => { return standaloneChartBody; } - const elementStyle = (dimension, elementSize, gutterSize) => ({ - [dimension]: `calc(${elementSize}% - ${gutterSize + gutterMargin}px)`, - }); - return ( - - {props.vizType === 'filter_box' ? ( + + {vizType === 'filter_box' ? ( panelBody ) : ( { gutterSize={gutterHeight} onDragEnd={onDragEnd} elementStyle={elementStyle} + expandToMin > {panelBody} )} diff --git a/superset-frontend/src/explore/components/ExploreChartPanel.test.jsx b/superset-frontend/src/explore/components/ExploreChartPanel.test.jsx index c50a605a40aae..a779773052e69 100644 --- a/superset-frontend/src/explore/components/ExploreChartPanel.test.jsx +++ b/superset-frontend/src/explore/components/ExploreChartPanel.test.jsx @@ -17,23 +17,70 @@ * under the License. 
*/ import React from 'react'; - +import { render, screen } from 'spec/helpers/testing-library'; import ChartContainer from 'src/explore/components/ExploreChartPanel'; -describe('ChartContainer', () => { - const mockProps = { - sliceName: 'Trend Line', - vizType: 'line', - height: '500px', - actions: {}, - can_overwrite: false, - can_download: false, - containerId: 'foo', - width: '50px', - isStarred: false, - }; +const createProps = (overrides = {}) => ({ + sliceName: 'Trend Line', + vizType: 'line', + height: '500px', + actions: {}, + can_overwrite: false, + can_download: false, + containerId: 'foo', + width: '500px', + isStarred: false, + chartIsStale: false, + chart: {}, + form_data: {}, + ...overrides, +}); +describe('ChartContainer', () => { it('renders when vizType is line', () => { - expect(React.isValidElement()).toBe(true); + const props = createProps(); + expect(React.isValidElement()).toBe(true); + }); + + it('renders with alert banner', () => { + const props = createProps({ + chartIsStale: true, + chart: { chartStatus: 'rendered', queriesResponse: [{}] }, + }); + render(, { useRedux: true }); + expect(screen.getByText('Your chart is not up to date')).toBeVisible(); + }); + + it('doesnt render alert banner when no changes in control panel were made (chart is not stale)', () => { + const props = createProps({ + chartIsStale: false, + }); + render(, { useRedux: true }); + expect( + screen.queryByText('Your chart is not up to date'), + ).not.toBeInTheDocument(); + }); + + it('doesnt render alert banner when chart not created yet (no queries response)', () => { + const props = createProps({ + chartIsStale: true, + chart: { queriesResponse: [] }, + }); + render(, { useRedux: true }); + expect( + screen.queryByText('Your chart is not up to date'), + ).not.toBeInTheDocument(); + }); + + it('renders prompt to fill required controls when required control removed', () => { + const props = createProps({ + chartIsStale: true, + chart: { chartStatus: 'rendered', queriesResponse: [{}] }, + errorMessage: 'error', + }); + render(, { useRedux: true }); + expect( + screen.getByText('Required control values have been removed'), + ).toBeVisible(); }); }); diff --git a/superset-frontend/src/explore/components/ExploreViewContainer/ExploreViewContainer.test.tsx b/superset-frontend/src/explore/components/ExploreViewContainer/ExploreViewContainer.test.tsx index a240578c49fc5..7743997a35529 100644 --- a/superset-frontend/src/explore/components/ExploreViewContainer/ExploreViewContainer.test.tsx +++ b/superset-frontend/src/explore/components/ExploreViewContainer/ExploreViewContainer.test.tsx @@ -27,7 +27,10 @@ import ExploreViewContainer from '.'; const reduxState = { explore: { common: { conf: { SUPERSET_WEBSERVER_TIMEOUT: 60 } }, - controls: { datasource: { value: '1__table' } }, + controls: { + datasource: { value: '1__table' }, + viz_type: { value: 'table' }, + }, datasource: { id: 1, type: 'table', diff --git a/superset-frontend/src/explore/components/ExploreViewContainer/index.jsx b/superset-frontend/src/explore/components/ExploreViewContainer/index.jsx index b856ba706cf88..da18dcc4ff5c5 100644 --- a/superset-frontend/src/explore/components/ExploreViewContainer/index.jsx +++ b/superset-frontend/src/explore/components/ExploreViewContainer/index.jsx @@ -22,7 +22,7 @@ import PropTypes from 'prop-types'; import { bindActionCreators } from 'redux'; import { connect } from 'react-redux'; import { styled, t, css, useTheme, logging } from '@superset-ui/core'; -import { debounce } from 'lodash'; +import { 
debounce, pick } from 'lodash'; import { Resizable } from 're-resizable'; import { useChangeEffect } from 'src/hooks/useChangeEffect'; import { usePluginContext } from 'src/components/DynamicPlugins'; @@ -63,8 +63,6 @@ import ConnectedExploreChartHeader from '../ExploreChartHeader'; const propTypes = { ...ExploreChartPanel.propTypes, - height: PropTypes.string, - width: PropTypes.string, actions: PropTypes.object.isRequired, datasource_type: PropTypes.string.isRequired, dashboardId: PropTypes.number, @@ -135,6 +133,7 @@ const ExplorePanelContainer = styled.div` flex: 1; min-width: ${theme.gridUnit * 128}px; border-left: 1px solid ${theme.colors.grayscale.light2}; + padding: 0 ${theme.gridUnit * 4}px; .panel { margin-bottom: 0; } @@ -172,23 +171,6 @@ const ExplorePanelContainer = styled.div` `}; `; -const getWindowSize = () => ({ - height: window.innerHeight, - width: window.innerWidth, -}); - -function useWindowSize({ delayMs = 250 } = {}) { - const [size, setSize] = useState(getWindowSize()); - - useEffect(() => { - const onWindowResize = debounce(() => setSize(getWindowSize()), delayMs); - window.addEventListener('resize', onWindowResize); - return () => window.removeEventListener('resize', onWindowResize); - }, []); - - return size; -} - const updateHistory = debounce( async (formData, datasetId, isReplace, standalone, force, title, tabId) => { const payload = { ...formData }; @@ -246,7 +228,6 @@ function ExploreViewContainer(props) { const [lastQueriedControls, setLastQueriedControls] = useState( props.controls, ); - const windowSize = useWindowSize(); const [showingModal, setShowingModal] = useState(false); const [isCollapsed, setIsCollapsed] = useState(false); @@ -254,11 +235,6 @@ function ExploreViewContainer(props) { const tabId = useTabId(); const theme = useTheme(); - const width = `${windowSize.width}px`; - const navHeight = props.standalone ? 0 : 120; - const height = props.forcedHeight - ? `${props.forcedHeight}px` - : `${windowSize.height - navHeight}px`; const defaultSidebarsWidth = { controls_width: 320, @@ -405,18 +381,33 @@ function ExploreViewContainer(props) { } }, []); - const reRenderChart = () => { - props.actions.updateQueryFormData( - getFormDataFromControls(props.controls), + const reRenderChart = useCallback( + controlsChanged => { + const newQueryFormData = controlsChanged + ? 
{ + ...props.chart.latestQueryFormData, + ...getFormDataFromControls(pick(props.controls, controlsChanged)), + } + : getFormDataFromControls(props.controls); + props.actions.updateQueryFormData(newQueryFormData, props.chart.id); + props.actions.renderTriggered(new Date().getTime(), props.chart.id); + addHistory(); + }, + [ + addHistory, + props.actions, props.chart.id, - ); - props.actions.renderTriggered(new Date().getTime(), props.chart.id); - addHistory(); - }; + props.chart.latestQueryFormData, + props.controls, + ], + ); // effect to run when controls change useEffect(() => { - if (previousControls) { + if ( + previousControls && + props.chart.latestQueryFormData.viz_type === props.controls.viz_type.value + ) { if ( props.controls.datasource && (previousControls.datasource == null || @@ -436,11 +427,11 @@ function ExploreViewContainer(props) { ); // this should also be handled by the actions that are actually changing the controls - const hasDisplayControlChanged = changedControlKeys.some( + const displayControlsChanged = changedControlKeys.filter( key => props.controls[key].renderTrigger, ); - if (hasDisplayControlChanged) { - reRenderChart(); + if (displayControlsChanged.length > 0) { + reRenderChart(displayControlsChanged); } } }, [props.controls, props.ownState]); @@ -515,11 +506,9 @@ function ExploreViewContainer(props) { function renderChartContainer() { return ( ); diff --git a/superset-frontend/src/explore/components/controls/CheckboxControl.jsx b/superset-frontend/src/explore/components/controls/CheckboxControl.jsx index 5b14f0d52f772..a570bbfed6922 100644 --- a/superset-frontend/src/explore/components/controls/CheckboxControl.jsx +++ b/superset-frontend/src/explore/components/controls/CheckboxControl.jsx @@ -18,6 +18,7 @@ */ import React from 'react'; import PropTypes from 'prop-types'; +import { styled, css } from '@superset-ui/core'; import ControlHeader from '../ControlHeader'; import Checkbox from '../../../components/Checkbox'; @@ -32,7 +33,16 @@ const defaultProps = { onChange: () => {}, }; -const checkboxStyle = { paddingRight: '5px' }; +const CheckBoxControlWrapper = styled.div` + ${({ theme }) => css` + .ControlHeader label { + color: ${theme.colors.grayscale.dark1}; + } + span[role='checkbox'] { + padding-right: ${theme.gridUnit * 2}px; + } + `} +`; export default class CheckboxControl extends React.Component { onChange() { @@ -43,7 +53,6 @@ export default class CheckboxControl extends React.Component { return ( ); @@ -52,11 +61,13 @@ export default class CheckboxControl extends React.Component { render() { if (this.props.label) { return ( - + + + ); } return this.renderCheckbox(); diff --git a/superset-frontend/src/explore/components/controls/SliderControl.tsx b/superset-frontend/src/explore/components/controls/SliderControl.tsx index 5907e26ba8348..a2d3b7c2bced1 100644 --- a/superset-frontend/src/explore/components/controls/SliderControl.tsx +++ b/superset-frontend/src/explore/components/controls/SliderControl.tsx @@ -18,19 +18,50 @@ */ import React from 'react'; import Slider from 'src/components/Slider'; -import ControlHeader from 'src/explore/components/ControlHeader'; +import ControlHeader, { + ControlHeaderProps, +} from 'src/explore/components/ControlHeader'; -type SliderControlProps = { +type SliderControlProps = ControlHeaderProps & { onChange: (value: number) => void; value: number; default?: number; }; -export default function SliderControl(props: SliderControlProps) { - const { onChange = () => {}, default: defaultValue, ...rest } = props; +export 
default function SliderControl({ + default: defaultValue, + name, + label, + description, + renderTrigger, + rightNode, + leftNode, + validationErrors, + hovered, + warning, + danger, + onClick, + tooltipOnClick, + onChange = () => {}, + ...rest +}: SliderControlProps) { + const headerProps = { + name, + label, + description, + renderTrigger, + rightNode, + leftNode, + validationErrors, + onClick, + hovered, + tooltipOnClick, + warning, + danger, + }; return ( <> - + ); diff --git a/superset-frontend/src/explore/controlPanels/sections.tsx b/superset-frontend/src/explore/controlPanels/sections.tsx index a1c786a73c15d..a6adbf3af23c3 100644 --- a/superset-frontend/src/explore/controlPanels/sections.tsx +++ b/superset-frontend/src/explore/controlPanels/sections.tsx @@ -132,7 +132,7 @@ export const NVD3TimeSeries: ControlPanelSectionConfig[] = [ 'of query results', ), controlSetRows: [ - [
<h1 className="section-header">{t('Rolling window')}</h1>], + [<div className="section-header">{t('Rolling window')}</div>
], [ { name: 'rolling_type', @@ -181,7 +181,7 @@ export const NVD3TimeSeries: ControlPanelSectionConfig[] = [ }, }, ], - [
<h1 className="section-header">{t('Time comparison')}</h1>], + [<div className="section-header">{t('Time comparison')}</div>
], [ { name: 'time_compare', @@ -230,9 +230,7 @@ export const NVD3TimeSeries: ControlPanelSectionConfig[] = [ }, }, ], - [
<h1 className="section-header">{t('Python functions')}</h1>], - // eslint-disable-next-line jsx-a11y/heading-has-content - [<h2 className="section-header">pandas.resample</h2>], + [<div className="section-header">{t('Resample')}</div>
], [ { name: 'resample_rule', diff --git a/superset-frontend/src/explore/controlUtils/getFormDataFromControls.ts b/superset-frontend/src/explore/controlUtils/getFormDataFromControls.ts index f5ffa523c359e..ba9419da18737 100644 --- a/superset-frontend/src/explore/controlUtils/getFormDataFromControls.ts +++ b/superset-frontend/src/explore/controlUtils/getFormDataFromControls.ts @@ -22,13 +22,10 @@ import { ControlStateMapping } from '@superset-ui/chart-controls'; export function getFormDataFromControls( controlsState: ControlStateMapping, ): QueryFormData { - const formData: QueryFormData = { - viz_type: 'table', - datasource: '', - }; + const formData = {}; Object.keys(controlsState).forEach(controlName => { const control = controlsState[controlName]; formData[controlName] = control.value; }); - return formData; + return formData as QueryFormData; } diff --git a/superset-frontend/src/explore/main.less b/superset-frontend/src/explore/main.less index d85e855b4d2cc..015a8a1a3bed3 100644 --- a/superset-frontend/src/explore/main.less +++ b/superset-frontend/src/explore/main.less @@ -127,18 +127,11 @@ } } -h1.section-header { - font-size: @font-size-m; - font-weight: @font-weight-bold; - margin-bottom: 0; - margin-top: 0; - padding-bottom: 5px; -} - -h2.section-header { +div.section-header { font-size: @font-size-s; font-weight: @font-weight-bold; + color: @gray-light5; margin-bottom: 0; margin-top: 0; - padding-bottom: 5px; + padding-bottom: 16px; } diff --git a/superset-frontend/src/utils/getChartRequiredFieldsMissingMessage.ts b/superset-frontend/src/utils/getChartRequiredFieldsMissingMessage.ts new file mode 100644 index 0000000000000..ac11e8503dc2f --- /dev/null +++ b/superset-frontend/src/utils/getChartRequiredFieldsMissingMessage.ts @@ -0,0 +1,26 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { t } from '@superset-ui/core'; + +export const getChartRequiredFieldsMissingMessage = (isCreating: boolean) => + t( + 'Select values in highlighted field(s) in the control panel. Then run the query by clicking on the %s button.', + isCreating ? 
'"Create chart"' : '"Update chart"', + ); diff --git a/superset-frontend/src/utils/urlUtils.ts b/superset-frontend/src/utils/urlUtils.ts index be857517e06d0..bd570291f2cba 100644 --- a/superset-frontend/src/utils/urlUtils.ts +++ b/superset-frontend/src/utils/urlUtils.ts @@ -154,11 +154,15 @@ export function getChartPermalink( }); } -export function getDashboardPermalink( - dashboardId: string, - filterState: JsonObject, - hash?: string, -) { +export function getDashboardPermalink({ + dashboardId, + filterState, + hash, // the anchor part of the link which corresponds to the tab/chart id +}: { + dashboardId: string | number; + filterState: JsonObject; + hash?: string; +}) { // only encode filter box state if non-empty return getPermalink(`/api/v1/dashboard/${dashboardId}/permalink`, { filterState, diff --git a/superset-frontend/src/views/CRUD/alert/AlertList.tsx b/superset-frontend/src/views/CRUD/alert/AlertList.tsx index f0f9d7423b24b..66fc0109238a5 100644 --- a/superset-frontend/src/views/CRUD/alert/AlertList.tsx +++ b/superset-frontend/src/views/CRUD/alert/AlertList.tsx @@ -89,7 +89,7 @@ function AlertList({ const title = isReportEnabled ? t('report') : t('alert'); const titlePlural = isReportEnabled ? t('reports') : t('alerts'); const pathName = isReportEnabled ? 'Reports' : 'Alerts'; - const initalFilters = useMemo( + const initialFilters = useMemo( () => [ { id: 'type', @@ -117,7 +117,7 @@ function AlertList({ addDangerToast, true, undefined, - initalFilters, + initialFilters, ); const { updateResource } = useSingleViewResource>( @@ -261,9 +261,15 @@ function AlertList({ size: 'xl', }, { - accessor: 'created_by', + Cell: ({ + row: { + original: { created_by }, + }, + }: any) => + created_by ? `${created_by.first_name} ${created_by.last_name}` : '', + Header: t('Created by'), + id: 'created_by', disableSortBy: true, - hidden: true, size: 'xl', }, { @@ -378,6 +384,22 @@ function AlertList({ const filters: Filters = useMemo( () => [ + { + Header: t('Owner'), + id: 'owners', + input: 'select', + operator: FilterOperator.relationManyMany, + unfilteredLabel: 'All', + fetchSelects: createFetchRelated( + 'report', + 'owners', + createErrorHandler(errMsg => + t('An error occurred while fetching owners values: %s', errMsg), + ), + user, + ), + paginate: true, + }, { Header: t('Created by'), id: 'created_by', diff --git a/superset-frontend/src/views/CRUD/alert/components/AlertReportCronScheduler.test.tsx b/superset-frontend/src/views/CRUD/alert/components/AlertReportCronScheduler.test.tsx index 822b129c56de7..5d36c2994dcab 100644 --- a/superset-frontend/src/views/CRUD/alert/components/AlertReportCronScheduler.test.tsx +++ b/superset-frontend/src/views/CRUD/alert/components/AlertReportCronScheduler.test.tsx @@ -16,58 +16,138 @@ * specific language governing permissions and limitations * under the License. 
*/ - import React from 'react'; -import { ReactWrapper } from 'enzyme'; -import { styledMount as mount } from 'spec/helpers/theming'; -import { CronPicker } from 'src/components/CronPicker'; -import { Input } from 'src/components/Input'; -import { AlertReportCronScheduler } from './AlertReportCronScheduler'; +import { render, screen, waitFor, within } from 'spec/helpers/testing-library'; +import userEvent from '@testing-library/user-event'; +import { act } from 'react-dom/test-utils'; + +import { + AlertReportCronScheduler, + AlertReportCronSchedulerProps, +} from './AlertReportCronScheduler'; + +const createProps = (props: Partial = {}) => ({ + onChange: jest.fn(), + value: '* * * * *', + ...props, +}); + +test('should render', () => { + const props = createProps(); + render(); + + // Text found in the first radio option + expect(screen.getByText('Every')).toBeInTheDocument(); + // Text found in the second radio option + expect(screen.getByText('CRON Schedule')).toBeInTheDocument(); +}); + +test('only one radio option should be enabled at a time', () => { + const props = createProps(); + const { container } = render(); + + expect(screen.getByTestId('picker')).toBeChecked(); + expect(screen.getByTestId('input')).not.toBeChecked(); + + const pickerContainer = container.querySelector( + '.react-js-cron-select', + ) as HTMLElement; + const inputContainer = screen.getByTestId('input-content'); + + expect(within(pickerContainer).getAllByRole('combobox')[0]).toBeEnabled(); + expect(inputContainer.querySelector('input[name="crontab"]')).toBeDisabled(); + + userEvent.click(screen.getByTestId('input')); + + expect(within(pickerContainer).getAllByRole('combobox')[0]).toBeDisabled(); + expect(inputContainer.querySelector('input[name="crontab"]')).toBeEnabled(); + + userEvent.click(screen.getByTestId('picker')); + + expect(within(pickerContainer).getAllByRole('combobox')[0]).toBeEnabled(); + expect(inputContainer.querySelector('input[name="crontab"]')).toBeDisabled(); +}); + +test('picker mode updates correctly', async () => { + const onChangeCallback = jest.fn(); + const props = createProps({ + onChange: onChangeCallback, + }); + + const { container } = render(); -describe('AlertReportCronScheduler', () => { - let wrapper: ReactWrapper; + expect(screen.getByTestId('picker')).toBeChecked(); - it('calls onChnage when value chnages', () => { - const onChangeMock = jest.fn(); - wrapper = mount( - , - ); + const pickerContainer = container.querySelector( + '.react-js-cron-select', + ) as HTMLElement; - const changeValue = '1,7 * * * *'; + const firstSelect = within(pickerContainer).getAllByRole('combobox')[0]; + act(() => { + userEvent.click(firstSelect); + }); - wrapper.find(CronPicker).props().setValue(changeValue); - expect(onChangeMock).toHaveBeenLastCalledWith(changeValue); + expect(await within(pickerContainer).findByText('day')).toBeInTheDocument(); + act(() => { + userEvent.click(within(pickerContainer).getByText('day')); }); - it.skip('sets input value when cron picker changes', () => { - const onChangeMock = jest.fn(); - wrapper = mount( - , - ); + expect(onChangeCallback).toHaveBeenLastCalledWith('* * * * *'); + + const secondSelect = container.querySelector( + '.react-js-cron-hours .ant-select-selector', + ) as HTMLElement; + await waitFor(() => { + expect(secondSelect).toBeInTheDocument(); + }); + + act(() => { + userEvent.click(secondSelect); + }); - const changeValue = '1,7 * * * *'; + expect(await screen.findByText('9')).toBeInTheDocument(); + act(() => { + 
userEvent.click(screen.getByText('9')); + }); - wrapper.find(CronPicker).props().setValue(changeValue); - // TODO fix this class-style assertion that doesn't work on function components - // @ts-ignore - expect(wrapper.find(Input).state().value).toEqual(changeValue); + await waitFor(() => { + expect(onChangeCallback).toHaveBeenLastCalledWith('* 9 * * *'); }); +}); - it('calls onChange when input value changes', () => { - const onChangeMock = jest.fn(); - wrapper = mount( - , - ); - - const changeValue = '1,7 * * * *'; - const event = { - target: { value: changeValue }, - } as React.FocusEvent; - - const inputProps = wrapper.find(Input).props(); - if (inputProps.onBlur) { - inputProps.onBlur(event); - } - expect(onChangeMock).toHaveBeenLastCalledWith(changeValue); +test('input mode updates correctly', async () => { + const onChangeCallback = jest.fn(); + const props = createProps({ + onChange: onChangeCallback, }); + + render(); + + const inputContainer = screen.getByTestId('input-content'); + userEvent.click(screen.getByTestId('input')); + + const input = inputContainer.querySelector( + 'input[name="crontab"]', + ) as HTMLElement; + await waitFor(() => { + expect(input).toBeEnabled(); + }); + + userEvent.clear(input); + expect(input).toHaveValue(''); + + const value = '* 10 2 * *'; + await act(async () => { + await userEvent.type(input, value, { delay: 1 }); + }); + + await waitFor(() => { + expect(input).toHaveValue(value); + }); + + act(() => { + userEvent.click(inputContainer); + }); + + expect(onChangeCallback).toHaveBeenLastCalledWith(value); }); diff --git a/superset-frontend/src/views/CRUD/alert/components/AlertReportCronScheduler.tsx b/superset-frontend/src/views/CRUD/alert/components/AlertReportCronScheduler.tsx index 867ee880d7d73..5418842aeaaa5 100644 --- a/superset-frontend/src/views/CRUD/alert/components/AlertReportCronScheduler.tsx +++ b/superset-frontend/src/views/CRUD/alert/components/AlertReportCronScheduler.tsx @@ -16,27 +16,33 @@ * specific language governing permissions and limitations * under the License. 
*/ -import React, { useState, useCallback, useRef, FunctionComponent } from 'react'; +import React, { useState, useCallback, useRef, FocusEvent } from 'react'; import { t, useTheme } from '@superset-ui/core'; -import { AntdInput } from 'src/components'; +import { AntdInput, RadioChangeEvent } from 'src/components'; import { Input } from 'src/components/Input'; import { Radio } from 'src/components/Radio'; import { CronPicker, CronError } from 'src/components/CronPicker'; import { StyledInputContainer } from 'src/views/CRUD/alert/AlertReportModal'; -interface AlertReportCronSchedulerProps { +export interface AlertReportCronSchedulerProps { value: string; onChange: (change: string) => any; } -export const AlertReportCronScheduler: FunctionComponent = +export const AlertReportCronScheduler: React.FC = ({ value, onChange }) => { const theme = useTheme(); const inputRef = useRef(null); const [scheduleFormat, setScheduleFormat] = useState<'picker' | 'input'>( 'picker', ); + + const handleRadioButtonChange = useCallback( + (e: RadioChangeEvent) => setScheduleFormat(e.target.value), + [], + ); + const customSetValue = useCallback( (newValue: string) => { onChange(newValue); @@ -44,16 +50,25 @@ export const AlertReportCronScheduler: FunctionComponent) => { + onChange(event.target.value); + }, + [onChange], + ); + + const handlePressEnter = useCallback(() => { + onChange(inputRef.current?.input.value || ''); + }, [onChange]); + const [error, onError] = useState(); return ( <> - setScheduleFormat(e.target.value)} - value={scheduleFormat} - > +
<Radio.Group onChange={handleRadioButtonChange} value={scheduleFormat}> - <Radio value="picker" /> + <Radio data-test="picker" value="picker" /> - <Radio value="input" /> + <Radio data-test="input" value="input" /> <span className="input-label">CRON Schedule</span> - <StyledInputContainer className="styled-input"> + <StyledInputContainer data-test="input-content" className="styled-input"> - onBlur={(event: FocusEvent<HTMLInputElement>) => { - onChange(event.target.value); - }} - onPressEnter={() => { - onChange(inputRef.current?.input.value || ''); - }} + onBlur={handleBlur} + onPressEnter={handlePressEnter} />
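The hunk above also swaps the inline `onBlur`/`onPressEnter` arrow functions for `useCallback`-memoized handlers (`handleRadioButtonChange`, `handleBlur`, `handlePressEnter`), so child components receive stable function references between renders. A minimal sketch of the same pattern, assuming an illustrative `CronField` component and prop names that are not part of this PR:

import React, { useCallback, useState } from 'react';

// Sketch only: memoize handlers with useCallback so their identity stays
// stable across renders unless the real dependency (`onChange`) changes.
export const CronField: React.FC<{ onChange: (value: string) => void }> = ({
  onChange,
}) => {
  const [draft, setDraft] = useState('');

  // Recreated only when `onChange` changes, not on every render.
  const handleBlur = useCallback(
    (event: React.FocusEvent<HTMLInputElement>) => onChange(event.target.value),
    [onChange],
  );
  const handlePressEnter = useCallback(() => onChange(draft), [onChange, draft]);

  return (
    <input
      name="crontab"
      value={draft}
      onChange={e => setDraft(e.target.value)}
      onBlur={handleBlur}
      onKeyDown={e => {
        if (e.key === 'Enter') handlePressEnter();
      }}
    />
  );
};

Memoized handlers mainly pay off when the receiving component is itself memoized (e.g. wrapped in React.memo) or when the handler appears in other hooks' dependency arrays, as with the `Radio.Group` and `AntdInput` props in this file.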
diff --git a/superset-frontend/src/views/CRUD/data/database/DatabaseModal/DatabaseConnectionForm/TableCatalog.tsx b/superset-frontend/src/views/CRUD/data/database/DatabaseModal/DatabaseConnectionForm/TableCatalog.tsx index bc8cb40c161c7..fb70b9c3652a1 100644 --- a/superset-frontend/src/views/CRUD/data/database/DatabaseModal/DatabaseConnectionForm/TableCatalog.tsx +++ b/superset-frontend/src/views/CRUD/data/database/DatabaseModal/DatabaseConnectionForm/TableCatalog.tsx @@ -34,7 +34,6 @@ export const TableCatalog = ({ }: FieldPropTypes) => { const tableCatalog = db?.catalog || []; const catalogError = validationErrors || {}; - return (

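The `DatabaseModal` hunk below stops trusting the `validationErrors` state snapshot inside `onSave`: `getValidation` now resolves with the parsed errors (see the `superset-frontend/src/views/CRUD/hooks.ts` hunk further below, which adds `return parsedErrors`), so the save handler can branch on the returned value in the same tick instead of on a possibly stale state read. A sketch of the pitfall and the fix, assuming hypothetical `useValidation`/`handleSave` names:

import { useCallback, useState } from 'react';

// Sketch only: state set inside an async validator is not visible to code
// that awaits the validator in the same render pass.
function useValidation() {
  const [errors, setErrors] = useState<Record<string, string> | null>(null);

  const validate = useCallback(async (payload: Record<string, unknown>) => {
    const found =
      Object.keys(payload).length === 0 ? { payload: 'must not be empty' } : null;
    setErrors(found); // drives the error banner on the next render
    return found; // and lets callers make same-tick decisions
  }, []);

  return { errors, validate };
}

// Branch on the resolved value, not on `errors` state, which still holds the
// previous render's value immediately after `await`.
async function handleSave(
  payload: Record<string, unknown>,
  validate: (p: Record<string, unknown>) => Promise<Record<string, string> | null>,
) {
  const parsedErrors = await validate(payload);
  if (parsedErrors) {
    return; // abort the save; the UI shows errors on re-render
  }
  // ...proceed with the save request
}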
diff --git a/superset-frontend/src/views/CRUD/data/database/DatabaseModal/index.tsx b/superset-frontend/src/views/CRUD/data/database/DatabaseModal/index.tsx index c4faa8a483ebe..a6e93f8653271 100644 --- a/superset-frontend/src/views/CRUD/data/database/DatabaseModal/index.tsx +++ b/superset-frontend/src/views/CRUD/data/database/DatabaseModal/index.tsx @@ -96,49 +96,6 @@ const engineSpecificAlertMapping = { }, }; -const errorAlertMapping = { - GENERIC_DB_ENGINE_ERROR: { - message: t('Generic database engine error'), - }, - CONNECTION_MISSING_PARAMETERS_ERROR: { - message: t('Missing Required Fields'), - description: t('Please complete all required fields.'), - }, - CONNECTION_INVALID_HOSTNAME_ERROR: { - message: t('Could not verify the host'), - description: t( - 'The host is invalid. Please verify that this field is entered correctly.', - ), - }, - CONNECTION_PORT_CLOSED_ERROR: { - message: t('Port is closed'), - description: t('Please verify that port is open to connect.'), - }, - CONNECTION_INVALID_PORT_ERROR: { - message: t('Invalid Port Number'), - description: t( - 'The port must be a whole number less than or equal to 65535.', - ), - }, - CONNECTION_ACCESS_DENIED_ERROR: { - message: t('Invalid account information'), - description: t('Either the username or password is incorrect.'), - }, - CONNECTION_INVALID_PASSWORD_ERROR: { - message: t('Invalid account information'), - description: t('Either the username or password is incorrect.'), - }, - INVALID_PAYLOAD_SCHEMA_ERROR: { - message: t('Incorrect Fields'), - description: t('Please make sure all fields are filled out correctly'), - }, - TABLE_DOES_NOT_EXIST_ERROR: { - message: t('URL could not be identified'), - description: t( - 'The URL could not be identified. Please check for typos and make sure that "Type of google sheet allowed" selection matches the input', - ), - }, -}; const googleSheetConnectionEngine = 'gsheets'; interface DatabaseModalProps { @@ -227,7 +184,7 @@ function dbReducer( }; let query = {}; let query_input = ''; - let deserializeExtraJSON = { allows_virtual_table_explore: true }; + let deserializeExtraJSON = {}; let extra_json: DatabaseObject['extra_json']; switch (action.type) { @@ -576,8 +533,8 @@ const DatabaseModal: FunctionComponent = ({ if (dbToUpdate.configuration_method === CONFIGURATION_METHOD.DYNAMIC_FORM) { // Validate DB before saving - await getValidation(dbToUpdate, true); - if (validationErrors && !isEmpty(validationErrors)) { + const errors = await getValidation(dbToUpdate, true); + if ((validationErrors && !isEmpty(validationErrors)) || errors) { return; } const parameters_schema = isEditMode @@ -679,7 +636,6 @@ const DatabaseModal: FunctionComponent = ({ passwords, confirmedOverwrite, ); - if (dbId) { onClose(); addSuccessToast(t('Database connected')); @@ -1112,44 +1068,21 @@ const DatabaseModal: FunctionComponent = ({ ); }; + // eslint-disable-next-line consistent-return const errorAlert = () => { - if ( - isEmpty(dbErrors) || - (isEmpty(validationErrors) && - !(validationErrors?.error_type in errorAlertMapping)) - ) { - return <>; - } - - if (validationErrors) { + if (isEmpty(dbErrors) === false) { + const message: Array = + typeof dbErrors === 'object' ? 
Object.values(dbErrors) : []; return ( antDErrorAlertStyles(theme)} - message={ - errorAlertMapping[validationErrors?.error_type]?.message || - validationErrors?.error_type - } - description={ - errorAlertMapping[validationErrors?.error_type]?.description || - validationErrors?.description || - JSON.stringify(validationErrors) - } - showIcon - closable={false} + message={t('Database Creation Error')} + description={message?.[0] || dbErrors} /> ); } - const message: Array = - typeof dbErrors === 'object' ? Object.values(dbErrors) : []; - return ( - antDErrorAlertStyles(theme)} - message={t('Database Creation Error')} - description={message?.[0] || dbErrors} - /> - ); + return <>; }; const renderFinishState = () => { diff --git a/superset-frontend/src/views/CRUD/hooks.ts b/superset-frontend/src/views/CRUD/hooks.ts index 5a0e26131efc0..ba544909cbead 100644 --- a/superset-frontend/src/views/CRUD/hooks.ts +++ b/superset-frontend/src/views/CRUD/hooks.ts @@ -777,6 +777,7 @@ export function useDatabaseValidation() { {}, ); setValidationErrors(parsedErrors); + return parsedErrors; }); } // eslint-disable-next-line no-console diff --git a/superset-frontend/tools/eslint-plugin-theme-colors/package.json b/superset-frontend/tools/eslint-plugin-theme-colors/package.json index 6832811e8a386..25938c97bd8d4 100644 --- a/superset-frontend/tools/eslint-plugin-theme-colors/package.json +++ b/superset-frontend/tools/eslint-plugin-theme-colors/package.json @@ -9,9 +9,5 @@ "keywords": [], "license": "Apache-2.0", "author": "Apache", - "dependencies": {}, - "engines": { - "node": "^16.9.1", - "npm": "^7.5.4" - } + "dependencies": {} } diff --git a/superset-frontend/webpack.config.js b/superset-frontend/webpack.config.js index d6b280c7537e0..6fff90105d3b1 100644 --- a/superset-frontend/webpack.config.js +++ b/superset-frontend/webpack.config.js @@ -383,7 +383,9 @@ const config = { loader: 'less-loader', options: { sourceMap: true, - javascriptEnabled: true, + lessOptions: { + javascriptEnabled: true, + }, }, }, ], diff --git a/superset-websocket/package-lock.json b/superset-websocket/package-lock.json index 07c3ef4c9b8e6..808666237672a 100644 --- a/superset-websocket/package-lock.json +++ b/superset-websocket/package-lock.json @@ -1541,9 +1541,9 @@ } }, "node_modules/async": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/async/-/async-3.2.0.tgz", - "integrity": "sha512-TR2mEZFVOj2pLStYxLht7TyfuRzaydfpxr3k9RpHIzMgw7A64dzsdqCxH1WJyQdoe8T10nDXd9wnEigmiuHIZw==" + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.3.tgz", + "integrity": "sha512-spZRyzKL5l5BZQrr/6m/SqFdBN0q3OCI0f9rjfBzCMBIP4p75P620rR3gTmaksNOhmzgdxcaxdNfMy6anrbM0g==" }, "node_modules/asynckit": { "version": "0.4.0", @@ -4341,9 +4341,9 @@ } }, "node_modules/minimist": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", + "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==", "dev": true }, "node_modules/ms": { @@ -6966,9 +6966,9 @@ "dev": true }, "async": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/async/-/async-3.2.0.tgz", - "integrity": "sha512-TR2mEZFVOj2pLStYxLht7TyfuRzaydfpxr3k9RpHIzMgw7A64dzsdqCxH1WJyQdoe8T10nDXd9wnEigmiuHIZw==" + "version": "3.2.3", + "resolved": 
"https://registry.npmjs.org/async/-/async-3.2.3.tgz", + "integrity": "sha512-spZRyzKL5l5BZQrr/6m/SqFdBN0q3OCI0f9rjfBzCMBIP4p75P620rR3gTmaksNOhmzgdxcaxdNfMy6anrbM0g==" }, "asynckit": { "version": "0.4.0", @@ -9172,9 +9172,9 @@ } }, "minimist": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", + "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==", "dev": true }, "ms": { diff --git a/superset/charts/schemas.py b/superset/charts/schemas.py index 2a967eda27f9d..614bfeb0cae19 100644 --- a/superset/charts/schemas.py +++ b/superset/charts/schemas.py @@ -17,6 +17,7 @@ # pylint: disable=too-many-lines from __future__ import annotations +import inspect from typing import Any, Dict, Optional, TYPE_CHECKING from flask_babel import gettext as _ @@ -27,7 +28,7 @@ from superset import app from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType from superset.db_engine_specs.base import builtin_time_grains -from superset.utils import schema as utils +from superset.utils import pandas_postprocessing, schema as utils from superset.utils.core import ( AnnotationType, FilterOperator, @@ -770,24 +771,12 @@ class ChartDataPostProcessingOperationSchema(Schema): description="Post processing operation type", required=True, validate=validate.OneOf( - choices=( - "aggregate", - "boxplot", - "contribution", - "cum", - "geodetic_parse", - "geohash_decode", - "geohash_encode", - "pivot", - "prophet", - "rolling", - "select", - "sort", - "diff", - "compare", - "resample", - "flatten", - ) + choices=[ + name + for name, value in inspect.getmembers( + pandas_postprocessing, inspect.isfunction + ) + ] ), example="aggregate", ) diff --git a/superset/columns/models.py b/superset/columns/models.py index fbe045e3d3925..bfee3de859819 100644 --- a/superset/columns/models.py +++ b/superset/columns/models.py @@ -23,7 +23,6 @@ These models are not fully implemented, and shouldn't be used yet. """ - import sqlalchemy as sa from flask_appbuilder import Model @@ -33,6 +32,8 @@ ImportExportMixin, ) +UNKOWN_TYPE = "UNKNOWN" + class Column( Model, @@ -52,51 +53,58 @@ class Column( id = sa.Column(sa.Integer, primary_key=True) + # Assuming the column is an aggregation, is it additive? Useful for determining which + # aggregations can be done on the metric. Eg, ``COUNT(DISTINCT user_id)`` is not + # additive, so it shouldn't be used in a ``SUM``. + is_additive = sa.Column(sa.Boolean, default=False) + + # Is this column an aggregation (metric)? + is_aggregation = sa.Column(sa.Boolean, default=False) + + is_filterable = sa.Column(sa.Boolean, nullable=False, default=True) + is_dimensional = sa.Column(sa.Boolean, nullable=False, default=False) + + # Is an increase desired? Useful for displaying the results of A/B tests, or setting + # up alerts. Eg, this is true for "revenue", but false for "latency". + is_increase_desired = sa.Column(sa.Boolean, default=True) + + # Column is managed externally and should be read-only inside Superset + is_managed_externally = sa.Column(sa.Boolean, nullable=False, default=False) + + # Is this column a partition? Useful for scheduling queries and previewing the latest + # data. 
+ is_partition = sa.Column(sa.Boolean, default=False) + + # Does the expression point directly to a physical column? + is_physical = sa.Column(sa.Boolean, default=True) + + # Is this a spatial column? This could be leveraged in the future for spatial + # visualizations. + is_spatial = sa.Column(sa.Boolean, default=False) + + # Is this a time column? Useful for plotting time series. + is_temporal = sa.Column(sa.Boolean, default=False) + # We use ``sa.Text`` for these attributes because (1) in modern databases the # performance is the same as ``VARCHAR``[1] and (2) because some table names can be # **really** long (eg, Google Sheets URLs). # # [1] https://www.postgresql.org/docs/9.1/datatype-character.html name = sa.Column(sa.Text) - type = sa.Column(sa.Text) + # Raw type as returned and used by db engine. + type = sa.Column(sa.Text, default=UNKOWN_TYPE) # Columns are defined by expressions. For tables, these are the actual columns names, # and should match the ``name`` attribute. For datasets, these can be any valid SQL # expression. If the SQL expression is an aggregation the column is a metric, # otherwise it's a computed column. expression = sa.Column(sa.Text) - - # Does the expression point directly to a physical column? - is_physical = sa.Column(sa.Boolean, default=True) + unit = sa.Column(sa.Text) # Additional metadata describing the column. description = sa.Column(sa.Text) warning_text = sa.Column(sa.Text) - unit = sa.Column(sa.Text) - - # Is this a time column? Useful for plotting time series. - is_temporal = sa.Column(sa.Boolean, default=False) - - # Is this a spatial column? This could be leveraged in the future for spatial - # visualizations. - is_spatial = sa.Column(sa.Boolean, default=False) - - # Is this column a partition? Useful for scheduling queries and previewing the latest - # data. - is_partition = sa.Column(sa.Boolean, default=False) - - # Is this column an aggregation (metric)? - is_aggregation = sa.Column(sa.Boolean, default=False) - - # Assuming the column is an aggregation, is it additive? Useful for determining which - # aggregations can be done on the metric. Eg, ``COUNT(DISTINCT user_id)`` is not - # additive, so it shouldn't be used in a ``SUM``. - is_additive = sa.Column(sa.Boolean, default=False) - - # Is an increase desired? Useful for displaying the results of A/B tests, or setting - # up alerts. Eg, this is true for "revenue", but false for "latency". 
- is_increase_desired = sa.Column(sa.Boolean, default=True) - - # Column is managed externally and should be read-only inside Superset - is_managed_externally = sa.Column(sa.Boolean, nullable=False, default=False) external_url = sa.Column(sa.Text, nullable=True) + + def __repr__(self) -> str: + return f"" diff --git a/superset/connectors/base/models.py b/superset/connectors/base/models.py index 9aacb0dc8c641..3d22857912f10 100644 --- a/superset/connectors/base/models.py +++ b/superset/connectors/base/models.py @@ -31,7 +31,7 @@ from superset.models.slice import Slice from superset.superset_typing import FilterValue, FilterValues, QueryObjectDict from superset.utils import core as utils -from superset.utils.core import GenericDataType +from superset.utils.core import GenericDataType, MediumText METRIC_FORM_DATA_PARAMS = [ "metric", @@ -586,7 +586,7 @@ class BaseColumn(AuditMixinNullable, ImportExportMixin): type = Column(Text) groupby = Column(Boolean, default=True) filterable = Column(Boolean, default=True) - description = Column(Text) + description = Column(MediumText()) is_dttm = None # [optional] Set this to support import/export functionality @@ -672,7 +672,7 @@ class BaseMetric(AuditMixinNullable, ImportExportMixin): metric_name = Column(String(255), nullable=False) verbose_name = Column(String(1024)) metric_type = Column(String(32)) - description = Column(Text) + description = Column(MediumText()) d3format = Column(String(128)) warning_text = Column(Text) diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py index d7d62db2a7e0e..e0382c659514c 100644 --- a/superset/connectors/sqla/models.py +++ b/superset/connectors/sqla/models.py @@ -24,6 +24,7 @@ from datetime import datetime, timedelta from typing import ( Any, + Callable, cast, Dict, Hashable, @@ -34,6 +35,7 @@ Type, Union, ) +from uuid import uuid4 import dateutil.parser import numpy as np @@ -72,13 +74,13 @@ from sqlalchemy.sql.selectable import Alias, TableClause from superset import app, db, is_feature_enabled, security_manager -from superset.columns.models import Column as NewColumn +from superset.columns.models import Column as NewColumn, UNKOWN_TYPE from superset.common.db_query_status import QueryStatus from superset.connectors.base.models import BaseColumn, BaseDatasource, BaseMetric from superset.connectors.sqla.utils import ( + find_cached_objects_in_session, get_physical_table_metadata, get_virtual_table_metadata, - load_or_create_tables, validate_adhoc_subquery, ) from superset.datasets.models import Dataset as NewDataset @@ -100,7 +102,12 @@ clone_model, QueryResult, ) -from superset.sql_parse import ParsedQuery, sanitize_clause +from superset.sql_parse import ( + extract_table_references, + ParsedQuery, + sanitize_clause, + Table as TableName, +) from superset.superset_typing import ( AdhocColumn, AdhocMetric, @@ -114,6 +121,7 @@ GenericDataType, get_column_name, is_adhoc_column, + MediumText, QueryObjectFilterClause, remove_duplicates, ) @@ -130,6 +138,7 @@ "sum", "doubleSum", } +ADDITIVE_METRIC_TYPES_LOWER = {op.lower() for op in ADDITIVE_METRIC_TYPES} class SqlaQuery(NamedTuple): @@ -215,13 +224,13 @@ class TableColumn(Model, BaseColumn, CertificationMixin): __tablename__ = "table_columns" __table_args__ = (UniqueConstraint("table_id", "column_name"),) table_id = Column(Integer, ForeignKey("tables.id")) - table = relationship( + table: "SqlaTable" = relationship( "SqlaTable", backref=backref("columns", cascade="all, delete-orphan"), foreign_keys=[table_id], ) is_dttm = 
Column(Boolean, default=False) - expression = Column(Text) + expression = Column(MediumText()) python_date_format = Column(String(255)) extra = Column(Text) @@ -417,6 +426,59 @@ def data(self) -> Dict[str, Any]: return attr_dict + def to_sl_column( + self, known_columns: Optional[Dict[str, NewColumn]] = None + ) -> NewColumn: + """Convert a TableColumn to NewColumn""" + column = known_columns.get(self.uuid) if known_columns else None + if not column: + column = NewColumn() + + extra_json = self.get_extra_dict() + for attr in { + "verbose_name", + "python_date_format", + }: + value = getattr(self, attr) + if value: + extra_json[attr] = value + + column.uuid = self.uuid + column.created_on = self.created_on + column.changed_on = self.changed_on + column.created_by = self.created_by + column.changed_by = self.changed_by + column.name = self.column_name + column.type = self.type or UNKOWN_TYPE + column.expression = self.expression or self.table.quote_identifier( + self.column_name + ) + column.description = self.description + column.is_aggregation = False + column.is_dimensional = self.groupby + column.is_filterable = self.filterable + column.is_increase_desired = True + column.is_managed_externally = self.table.is_managed_externally + column.is_partition = False + column.is_physical = not self.expression + column.is_spatial = False + column.is_temporal = self.is_dttm + column.extra_json = json.dumps(extra_json) if extra_json else None + column.external_url = self.table.external_url + + return column + + @staticmethod + def after_delete( # pylint: disable=unused-argument + mapper: Mapper, + connection: Connection, + target: "TableColumn", + ) -> None: + session = inspect(target).session + column = session.query(NewColumn).filter_by(uuid=target.uuid).one_or_none() + if column: + session.delete(column) + class SqlMetric(Model, BaseMetric, CertificationMixin): @@ -430,7 +492,7 @@ class SqlMetric(Model, BaseMetric, CertificationMixin): backref=backref("metrics", cascade="all, delete-orphan"), foreign_keys=[table_id], ) - expression = Column(Text, nullable=False) + expression = Column(MediumText(), nullable=False) extra = Column(Text) export_fields = [ @@ -479,6 +541,58 @@ def data(self) -> Dict[str, Any]: attr_dict.update(super().data) return attr_dict + def to_sl_column( + self, known_columns: Optional[Dict[str, NewColumn]] = None + ) -> NewColumn: + """Convert a SqlMetric to NewColumn. 
Find and update existing or + create a new one.""" + column = known_columns.get(self.uuid) if known_columns else None + if not column: + column = NewColumn() + + extra_json = self.get_extra_dict() + for attr in {"verbose_name", "metric_type", "d3format"}: + value = getattr(self, attr) + if value is not None: + extra_json[attr] = value + is_additive = ( + self.metric_type and self.metric_type.lower() in ADDITIVE_METRIC_TYPES_LOWER + ) + + column.uuid = self.uuid + column.name = self.metric_name + column.created_on = self.created_on + column.changed_on = self.changed_on + column.created_by = self.created_by + column.changed_by = self.changed_by + column.type = UNKOWN_TYPE + column.expression = self.expression + column.warning_text = self.warning_text + column.description = self.description + column.is_aggregation = True + column.is_additive = is_additive + column.is_filterable = False + column.is_increase_desired = True + column.is_managed_externally = self.table.is_managed_externally + column.is_partition = False + column.is_physical = False + column.is_spatial = False + column.extra_json = json.dumps(extra_json) if extra_json else None + column.external_url = self.table.external_url + + return column + + @staticmethod + def after_delete( # pylint: disable=unused-argument + mapper: Mapper, + connection: Connection, + target: "SqlMetric", + ) -> None: + session = inspect(target).session + column = session.query(NewColumn).filter_by(uuid=target.uuid).one_or_none() + if column: + session.delete(column) + sqlatable_user = Table( "sqlatable_user", @@ -544,7 +658,7 @@ class SqlaTable(Model, BaseDatasource): # pylint: disable=too-many-public-metho foreign_keys=[database_id], ) schema = Column(String(255)) - sql = Column(Text) + sql = Column(MediumText()) is_sqllab_view = Column(Boolean, default=False) template_params = Column(Text) extra = Column(Text) @@ -1731,7 +1845,19 @@ def fetch_metadata(self, commit: bool = True) -> MetadataResult: metrics = [] any_date_col = None db_engine_spec = self.db_engine_spec - old_columns = db.session.query(TableColumn).filter(TableColumn.table == self) + + # If `self.id` is not set, this is a new table, so there is no need to fetch + # columns from the db. Passing `self.id` into the query would actually + # auto-generate a new id, which can be tricky during certain transactions. 
+ old_columns = ( + ( + db.session.query(TableColumn) + .filter(TableColumn.table_id == self.id) + .all() + ) + if self.id + else self.columns + ) old_columns_by_name: Dict[str, TableColumn] = { col.column_name: col for col in old_columns @@ -1745,13 +1871,15 @@ def fetch_metadata(self, commit: bool = True) -> MetadataResult: ) # clear old columns before adding modified columns back - self.columns = [] + columns = [] for col in new_columns: old_column = old_columns_by_name.pop(col["name"], None) if not old_column: results.added.append(col["name"]) new_column = TableColumn( - column_name=col["name"], type=col["type"], table=self + column_name=col["name"], + type=col["type"], + table=self, ) new_column.is_dttm = new_column.is_temporal db_engine_spec.alter_new_orm_column(new_column) @@ -1763,12 +1891,14 @@ def fetch_metadata(self, commit: bool = True) -> MetadataResult: new_column.expression = "" new_column.groupby = True new_column.filterable = True - self.columns.append(new_column) + columns.append(new_column) if not any_date_col and new_column.is_temporal: any_date_col = col["name"] - self.columns.extend( - [col for col in old_columns_by_name.values() if col.expression] - ) + + # add back calculated (virtual) columns + columns.extend([col for col in old_columns if col.expression]) + self.columns = columns + metrics.append( SqlMetric( metric_name="count", @@ -1854,6 +1984,10 @@ class and any keys added via `ExtraCache`. extra_cache_keys += sqla_query.extra_cache_keys return extra_cache_keys + @property + def quote_identifier(self) -> Callable[[str], str]: + return self.database.quote_identifier + @staticmethod def before_update( mapper: Mapper, # pylint: disable=unused-argument @@ -1895,14 +2029,44 @@ def before_update( ): raise Exception(get_dataset_exist_error_msg(target.full_name)) + def get_sl_columns(self) -> List[NewColumn]: + """ + Convert `SqlaTable.columns` and `SqlaTable.metrics` to the new Column model + """ + session: Session = inspect(self).session + + uuids = set() + for column_or_metric in self.columns + self.metrics: + # pre-assign a uuid to new columns or metrics before they are inserted so + # the related `NewColumn` can get a deterministic uuid, too + if not column_or_metric.uuid: + column_or_metric.uuid = uuid4() + else: + uuids.add(column_or_metric.uuid) + + # load existing columns from cached session states first + existing_columns = set( + find_cached_objects_in_session(session, NewColumn, uuids=uuids) + ) + for column in existing_columns: + uuids.remove(column.uuid) + + if uuids: + # load those not found from db + existing_columns |= set( + session.query(NewColumn).filter(NewColumn.uuid.in_(uuids)) + ) + + known_columns = {column.uuid: column for column in existing_columns} + return [ + item.to_sl_column(known_columns) for item in self.columns + self.metrics + ] + @staticmethod def update_table( # pylint: disable=unused-argument mapper: Mapper, connection: Connection, target: Union[SqlMetric, TableColumn] ) -> None: """ - Forces an update to the table's changed_on value when a metric or column on the - table is updated. This busts the cache key for all charts that use the table. - :param mapper: Unused. :param connection: Unused. :param target: The metric or column that was updated. 
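The `get_sl_columns` helper above checks the session's own tracked instances before querying the database, via `find_cached_objects_in_session` (added to `superset/connectors/sqla/utils.py` further down in this diff). A minimal, self-contained sketch of that lookup pattern, using a hypothetical `Widget` model and `load_widgets_by_uuid` helper rather than Superset's real classes:

```python
# Sketch of the "search the session before querying" pattern; `Widget` and
# `load_widgets_by_uuid` are hypothetical names, not part of Superset.
from uuid import uuid4

import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy_utils import UUIDType

Base = declarative_base()


class Widget(Base):
    __tablename__ = "widgets"
    id = sa.Column(sa.Integer, primary_key=True)
    uuid = sa.Column(UUIDType(binary=True), unique=True, default=uuid4)


def load_widgets_by_uuid(session, uuids):
    # iterating a Session yields every new, dirty, and already-loaded
    # instance it tracks, without emitting any SQL
    found = [obj for obj in session if isinstance(obj, Widget) and obj.uuid in uuids]
    missing = set(uuids) - {obj.uuid for obj in found}
    if missing:
        # only round-trip to the database for uuids not already in the session
        found.extend(session.query(Widget).filter(Widget.uuid.in_(missing)))
    return found


engine = sa.create_engine("sqlite://")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
pending = Widget(uuid=uuid4())
session.add(pending)  # pending, not yet flushed
assert pending in load_widgets_by_uuid(session, {pending.uuid})
```

The session-first lookup matters here because shadow-written `NewColumn` rows may still be pending in the session; a plain query could miss them and lead to duplicate rows with the same uuid after flush.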
@@ -1910,90 +2074,43 @@ def update_table( # pylint: disable=unused-argument inspector = inspect(target) session = inspector.session - # get DB-specific conditional quoter for expressions that point to columns or - # table names - database = ( - target.table.database - or session.query(Database).filter_by(id=target.database_id).one() - ) - engine = database.get_sqla_engine(schema=target.table.schema) - conditional_quote = engine.dialect.identifier_preparer.quote - + # Forces an update to the table's changed_on value when a metric or column on the + # table is updated. This busts the cache key for all charts that use the table. session.execute(update(SqlaTable).where(SqlaTable.id == target.table.id)) - dataset = ( - session.query(NewDataset) - .filter_by(sqlatable_id=target.table.id) - .one_or_none() - ) - - if not dataset: - # if dataset is not found create a new copy - # of the dataset instead of updating the existing - - SqlaTable.write_shadow_dataset(target.table, database, session) - return - - # update ``Column`` model as well - if isinstance(target, TableColumn): - columns = [ - column - for column in dataset.columns - if column.name == target.column_name - ] - if not columns: - return - - column = columns[0] - extra_json = json.loads(target.extra or "{}") - for attr in {"groupby", "filterable", "verbose_name", "python_date_format"}: - value = getattr(target, attr) - if value: - extra_json[attr] = value - - column.name = target.column_name - column.type = target.type or "Unknown" - column.expression = target.expression or conditional_quote( - target.column_name + # if the table itself has changed, shadow-writing will happen in `after_update` anyway + if target.table not in session.dirty: + dataset: NewDataset = ( + session.query(NewDataset) + .filter_by(uuid=target.table.uuid) + .one_or_none() ) - column.description = target.description - column.is_temporal = target.is_dttm - column.is_physical = target.expression is None - column.extra_json = json.dumps(extra_json) if extra_json else None - - else: # SqlMetric - columns = [ - column - for column in dataset.columns - if column.name == target.metric_name - ] - if not columns: + # Update shadow dataset and columns + # did we find the dataset? + if not dataset: + # if dataset is not found create a new copy + target.table.write_shadow_dataset() return - column = columns[0] - extra_json = json.loads(target.extra or "{}") - for attr in {"verbose_name", "metric_type", "d3format"}: - value = getattr(target, attr) - if value: - extra_json[attr] = value - - is_additive = ( - target.metric_type - and target.metric_type.lower() in ADDITIVE_METRIC_TYPES + # update changed_on timestamp + session.execute(update(NewDataset).where(NewDataset.id == dataset.id)) + + # update `Column` model as well + session.add( + target.to_sl_column( + { + target.uuid: session.query(NewColumn) + .filter_by(uuid=target.uuid) + .one_or_none() + } + ) ) - column.name = target.metric_name - column.expression = target.expression - column.warning_text = target.warning_text - column.description = target.description - column.is_additive = is_additive - column.extra_json = json.dumps(extra_json) if extra_json else None - @staticmethod def after_insert( mapper: Mapper, connection: Connection, - target: "SqlaTable", + sqla_table: "SqlaTable", ) -> None: """ Shadow write the dataset to new models. 
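Note that the `session.execute(update(SqlaTable).where(SqlaTable.id == target.table.id))` call above carries no explicit values: it relies on `changed_on` (from `AuditMixinNullable`) declaring an `onupdate` default, so any UPDATE touching the row refreshes the timestamp and thereby busts cache keys derived from it. A small sketch of the same mechanism, under the assumption of a hypothetical `Audited` model:

```python
# Sketch: an UPDATE with no explicit values still bumps an `onupdate` column.
# `Audited` is a hypothetical stand-in for models using AuditMixinNullable.
from datetime import datetime

import sqlalchemy as sa
from sqlalchemy import update
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()


class Audited(Base):
    __tablename__ = "audited"
    id = sa.Column(sa.Integer, primary_key=True)
    # `onupdate` fires for any UPDATE that does not set this column explicitly
    changed_on = sa.Column(sa.DateTime, default=datetime.now, onupdate=datetime.now)


engine = sa.create_engine("sqlite://")
Base.metadata.create_all(engine)
tbl = Audited.__table__
with engine.begin() as conn:
    conn.execute(tbl.insert().values(id=1))
    before = conn.execute(sa.select([tbl.c.changed_on])).scalar()
    # no .values() at all: SQLAlchemy fills the SET clause from the onupdate
    # default, refreshing `changed_on` (and any cache key derived from it)
    conn.execute(update(tbl).where(tbl.c.id == 1))
    after = conn.execute(sa.select([tbl.c.changed_on])).scalar()
    assert after is not None and after >= before
```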
@@ -2007,24 +2124,14 @@ def after_insert( For more context: https://github.com/apache/superset/issues/14909 """ - session = inspect(target).session - # set permissions - security_manager.set_perm(mapper, connection, target) - - # get DB-specific conditional quoter for expressions that point to columns or - # table names - database = ( - target.database - or session.query(Database).filter_by(id=target.database_id).one() - ) - - SqlaTable.write_shadow_dataset(target, database, session) + security_manager.set_perm(mapper, connection, sqla_table) + sqla_table.write_shadow_dataset() @staticmethod def after_delete( # pylint: disable=unused-argument mapper: Mapper, connection: Connection, - target: "SqlaTable", + sqla_table: "SqlaTable", ) -> None: """ Shadow write the dataset to new models. @@ -2038,18 +2145,18 @@ def after_delete( # pylint: disable=unused-argument For more context: https://github.com/apache/superset/issues/14909 """ - session = inspect(target).session + session = inspect(sqla_table).session dataset = ( - session.query(NewDataset).filter_by(sqlatable_id=target.id).one_or_none() + session.query(NewDataset).filter_by(uuid=sqla_table.uuid).one_or_none() ) if dataset: session.delete(dataset) @staticmethod - def after_update( # pylint: disable=too-many-branches, too-many-locals, too-many-statements + def after_update( mapper: Mapper, connection: Connection, - target: "SqlaTable", + sqla_table: "SqlaTable", ) -> None: """ Shadow write the dataset to new models. @@ -2063,172 +2170,76 @@ def after_update( # pylint: disable=too-many-branches, too-many-locals, too-man For more context: https://github.com/apache/superset/issues/14909 """ - inspector = inspect(target) + # set permissions + security_manager.set_perm(mapper, connection, sqla_table) + + inspector = inspect(sqla_table) session = inspector.session # double-check that ``UPDATE``s are actually pending (this method is called even # for instances that have no net changes to their column-based attributes) - if not session.is_modified(target, include_collections=True): + if not session.is_modified(sqla_table, include_collections=True): return - # set permissions - security_manager.set_perm(mapper, connection, target) - - dataset = ( - session.query(NewDataset).filter_by(sqlatable_id=target.id).one_or_none() + # find the dataset from the known instance list first + # (it could be either from a previous query or newly created) + dataset = next( + find_cached_objects_in_session( + session, NewDataset, uuids=[sqla_table.uuid] + ), + None, ) + # if not found, pull from database + if not dataset: + dataset = ( + session.query(NewDataset).filter_by(uuid=sqla_table.uuid).one_or_none() + ) if not dataset: + sqla_table.write_shadow_dataset() return - # get DB-specific conditional quoter for expressions that point to columns or - # table names - database = ( - target.database - or session.query(Database).filter_by(id=target.database_id).one() - ) - engine = database.get_sqla_engine(schema=target.schema) - conditional_quote = engine.dialect.identifier_preparer.quote - - # update columns - if inspector.attrs.columns.history.has_changes(): - # handle deleted columns - if inspector.attrs.columns.history.deleted: - column_names = { - column.column_name - for column in inspector.attrs.columns.history.deleted - } - dataset.columns = [ - column - for column in dataset.columns - if column.name not in column_names - ] - - # handle inserted columns - for column in inspector.attrs.columns.history.added: - # ``is_active`` might be 
``None``, but it defaults to ``True``. - if column.is_active is False: - continue - - extra_json = json.loads(column.extra or "{}") - for attr in { - "groupby", - "filterable", - "verbose_name", - "python_date_format", - }: - value = getattr(column, attr) - if value: - extra_json[attr] = value - - dataset.columns.append( - NewColumn( - name=column.column_name, - type=column.type or "Unknown", - expression=column.expression - or conditional_quote(column.column_name), - description=column.description, - is_temporal=column.is_dttm, - is_aggregation=False, - is_physical=column.expression is None, - is_spatial=False, - is_partition=False, - is_increase_desired=True, - extra_json=json.dumps(extra_json) if extra_json else None, - is_managed_externally=target.is_managed_externally, - external_url=target.external_url, - ) - ) - - # update metrics - if inspector.attrs.metrics.history.has_changes(): - # handle deleted metrics - if inspector.attrs.metrics.history.deleted: - column_names = { - metric.metric_name - for metric in inspector.attrs.metrics.history.deleted - } - dataset.columns = [ - column - for column in dataset.columns - if column.name not in column_names - ] - - # handle inserted metrics - for metric in inspector.attrs.metrics.history.added: - extra_json = json.loads(metric.extra or "{}") - for attr in {"verbose_name", "metric_type", "d3format"}: - value = getattr(metric, attr) - if value: - extra_json[attr] = value - - is_additive = ( - metric.metric_type - and metric.metric_type.lower() in ADDITIVE_METRIC_TYPES - ) - - dataset.columns.append( - NewColumn( - name=metric.metric_name, - type="Unknown", - expression=metric.expression, - warning_text=metric.warning_text, - description=metric.description, - is_aggregation=True, - is_additive=is_additive, - is_physical=False, - is_spatial=False, - is_partition=False, - is_increase_desired=True, - extra_json=json.dumps(extra_json) if extra_json else None, - is_managed_externally=target.is_managed_externally, - external_url=target.external_url, - ) - ) + # sync column list and delete removed columns + if ( + inspector.attrs.columns.history.has_changes() + or inspector.attrs.metrics.history.has_changes() + ): + # add pending new columns to the known columns list, too, so that calling + # `after_update` twice before changes are persisted will not create + # duplicate columns with the same uuids. + dataset.columns = sqla_table.get_sl_columns() # physical dataset - if target.sql is None: - physical_columns = [ - column for column in dataset.columns if column.is_physical - ] - - # if the table name changed we should create a new table instance, instead - # of reusing the original one + if not sqla_table.sql: + # if the table name changed we should relink the dataset to another table + # (and create one if necessary) if ( inspector.attrs.table_name.history.has_changes() or inspector.attrs.schema.history.has_changes() - or inspector.attrs.database_id.history.has_changes() + or inspector.attrs.database.history.has_changes() ): - # does the dataset point to an existing table? 
- table = ( - session.query(NewTable) - .filter_by( - database_id=target.database_id, - schema=target.schema, - name=target.table_name, - ) - .first() + tables = NewTable.bulk_load_or_create( + sqla_table.database, + [TableName(schema=sqla_table.schema, table=sqla_table.table_name)], + sync_columns=False, + default_props=dict( + changed_by=sqla_table.changed_by, + created_by=sqla_table.created_by, + is_managed_externally=sqla_table.is_managed_externally, + external_url=sqla_table.external_url, + ), ) - if not table: - # create new columns + if not tables[0].id: + # dataset columns will only be assigned to newly created tables + # existing tables should manage column syncing in another process physical_columns = [ - clone_model(column, ignore=["uuid"]) - for column in physical_columns + clone_model( + column, ignore=["uuid"], keep_relations=["changed_by"] + ) + for column in dataset.columns + if column.is_physical ] - - # create new table - table = NewTable( - name=target.table_name, - schema=target.schema, - catalog=None, - database_id=target.database_id, - columns=physical_columns, - is_managed_externally=target.is_managed_externally, - external_url=target.external_url, - ) - dataset.tables = [table] - elif dataset.tables: - table = dataset.tables[0] - table.columns = physical_columns + tables[0].columns = physical_columns + dataset.tables = tables # virtual dataset else: @@ -2237,29 +2248,34 @@ def after_update( # pylint: disable=too-many-branches, too-many-locals, too-man column.is_physical = False # update referenced tables if SQL changed - if inspector.attrs.sql.history.has_changes(): - parsed = ParsedQuery(target.sql) - referenced_tables = parsed.tables - - predicate = or_( - *[ - and_( - NewTable.schema == (table.schema or target.schema), - NewTable.name == table.table, - ) - for table in referenced_tables - ] + if sqla_table.sql and inspector.attrs.sql.history.has_changes(): + referenced_tables = extract_table_references( + sqla_table.sql, sqla_table.database.get_dialect().name + ) + dataset.tables = NewTable.bulk_load_or_create( + sqla_table.database, + referenced_tables, + default_schema=sqla_table.schema, + # sync metadata is expensive, we'll do it in another process + # e.g. when users open a Table page + sync_columns=False, + default_props=dict( + changed_by=sqla_table.changed_by, + created_by=sqla_table.created_by, + is_managed_externally=sqla_table.is_managed_externally, + external_url=sqla_table.external_url, + ), ) - dataset.tables = session.query(NewTable).filter(predicate).all() # update other attributes - dataset.name = target.table_name - dataset.expression = target.sql or conditional_quote(target.table_name) - dataset.is_physical = target.sql is None + dataset.name = sqla_table.table_name + dataset.expression = sqla_table.sql or sqla_table.quote_identifier( + sqla_table.table_name + ) + dataset.is_physical = not sqla_table.sql - @staticmethod - def write_shadow_dataset( # pylint: disable=too-many-locals - dataset: "SqlaTable", database: Database, session: Session + def write_shadow_dataset( + self: "SqlaTable", ) -> None: """ Shadow write the dataset to new models. 
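The hooks above and below are wired to SQLAlchemy mapper events (see the `sa.event.listen(...)` calls at the end of this module). A generic sketch of the shadow-write pattern they implement, with hypothetical `OldThing`/`NewThing` models standing in for `SqlaTable`/`NewDataset` (Superset's actual hooks construct full ORM objects rather than issuing a bare insert):

```python
# Generic sketch of shadow writing via mapper events; `OldThing`/`NewThing`
# are hypothetical and only illustrate the listener wiring used here for
# SqlaTable/SqlMetric/TableColumn.
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()


class OldThing(Base):
    __tablename__ = "old_things"
    id = sa.Column(sa.Integer, primary_key=True)
    name = sa.Column(sa.Text)


class NewThing(Base):
    __tablename__ = "new_things"
    id = sa.Column(sa.Integer, primary_key=True)
    old_id = sa.Column(sa.Integer, unique=True)
    name = sa.Column(sa.Text)


def shadow_write(mapper, connection, target):
    # runs inside the flush, in the same transaction as the triggering
    # INSERT; the low-level `connection` is the safe handle to write with
    connection.execute(
        NewThing.__table__.insert().values(old_id=target.id, name=target.name)
    )


sa.event.listen(OldThing, "after_insert", shadow_write)

engine = sa.create_engine("sqlite://")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add(OldThing(name="example"))
session.commit()
assert session.query(NewThing).filter_by(name="example").one()
```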
@@ -2273,95 +2289,57 @@ def write_shadow_dataset( # pylint: disable=too-many-locals For more context: https://github.com/apache/superset/issues/14909 """ - - engine = database.get_sqla_engine(schema=dataset.schema) - conditional_quote = engine.dialect.identifier_preparer.quote + session = inspect(self).session + # make sure database points to the right instance, in case only + # `table.database_id` is updated and the changes haven't been + # consolidated by SQLA + if self.database_id and ( + not self.database or self.database.id != self.database_id + ): + self.database = session.query(Database).filter_by(id=self.database_id).one() # create columns columns = [] - for column in dataset.columns: - # ``is_active`` might be ``None`` at this point, but it defaults to ``True``. - if column.is_active is False: - continue - - try: - extra_json = json.loads(column.extra or "{}") - except json.decoder.JSONDecodeError: - extra_json = {} - for attr in {"groupby", "filterable", "verbose_name", "python_date_format"}: - value = getattr(column, attr) - if value: - extra_json[attr] = value - - columns.append( - NewColumn( - name=column.column_name, - type=column.type or "Unknown", - expression=column.expression - or conditional_quote(column.column_name), - description=column.description, - is_temporal=column.is_dttm, - is_aggregation=False, - is_physical=column.expression is None, - is_spatial=False, - is_partition=False, - is_increase_desired=True, - extra_json=json.dumps(extra_json) if extra_json else None, - is_managed_externally=dataset.is_managed_externally, - external_url=dataset.external_url, - ), - ) - - # create metrics - for metric in dataset.metrics: - try: - extra_json = json.loads(metric.extra or "{}") - except json.decoder.JSONDecodeError: - extra_json = {} - for attr in {"verbose_name", "metric_type", "d3format"}: - value = getattr(metric, attr) - if value: - extra_json[attr] = value - - is_additive = ( - metric.metric_type - and metric.metric_type.lower() in ADDITIVE_METRIC_TYPES - ) - - columns.append( - NewColumn( - name=metric.metric_name, - type="Unknown", # figuring this out would require a type inferrer - expression=metric.expression, - warning_text=metric.warning_text, - description=metric.description, - is_aggregation=True, - is_additive=is_additive, - is_physical=False, - is_spatial=False, - is_partition=False, - is_increase_desired=True, - extra_json=json.dumps(extra_json) if extra_json else None, - is_managed_externally=dataset.is_managed_externally, - external_url=dataset.external_url, - ), - ) + for item in self.columns + self.metrics: + item.created_by = self.created_by + item.changed_by = self.changed_by + # on the `SqlaTable.after_insert` event, although the table itself + # already has a `uuid`, the associated columns will not. + # Here we pre-assign a uuid so they can still be matched to the new + # Column after creation. 
+ if not item.uuid: + item.uuid = uuid4() + columns.append(item.to_sl_column()) # physical dataset - if not dataset.sql: - physical_columns = [column for column in columns if column.is_physical] - - # create table - table = NewTable( - name=dataset.table_name, - schema=dataset.schema, - catalog=None, # currently not supported - database_id=dataset.database_id, - columns=physical_columns, - is_managed_externally=dataset.is_managed_externally, - external_url=dataset.external_url, + if not self.sql: + # always create separate column entries for Dataset and Table + # so updating a dataset would not update columns in the related table + physical_columns = [ + clone_model( + column, + ignore=["uuid"], + # `created_by` will always be left empty because it'd always + # be created via some sort of automated system. + # But keep `changed_by` in case someone manually changes + # column attributes such as `is_dttm`. + keep_relations=["changed_by"], + ) + for column in columns + if column.is_physical + ] + tables = NewTable.bulk_load_or_create( + self.database, + [TableName(schema=self.schema, table=self.table_name)], + sync_columns=False, + default_props=dict( + created_by=self.created_by, + changed_by=self.changed_by, + is_managed_externally=self.is_managed_externally, + external_url=self.external_url, + ), ) - tables = [table] + tables[0].columns = physical_columns # virtual dataset else: @@ -2370,26 +2348,39 @@ def write_shadow_dataset( # pylint: disable=too-many-locals column.is_physical = False # find referenced tables - parsed = ParsedQuery(dataset.sql) - referenced_tables = parsed.tables - tables = load_or_create_tables( - session, - database, - dataset.schema, + referenced_tables = extract_table_references( + self.sql, self.database.get_dialect().name + ) + tables = NewTable.bulk_load_or_create( + self.database, referenced_tables, - conditional_quote, + default_schema=self.schema, + # syncing table columns can be slow so we are not doing it here + sync_columns=False, + default_props=dict( + created_by=self.created_by, + changed_by=self.changed_by, + is_managed_externally=self.is_managed_externally, + external_url=self.external_url, + ), ) # create the new dataset new_dataset = NewDataset( - sqlatable_id=dataset.id, - name=dataset.table_name, - expression=dataset.sql or conditional_quote(dataset.table_name), + uuid=self.uuid, + database_id=self.database_id, + created_on=self.created_on, + created_by=self.created_by, + changed_by=self.changed_by, + changed_on=self.changed_on, + owners=self.owners, + name=self.table_name, + expression=self.sql or self.quote_identifier(self.table_name), tables=tables, columns=columns, - is_physical=not dataset.sql, - is_managed_externally=dataset.is_managed_externally, - external_url=dataset.external_url, + is_physical=not self.sql, + is_managed_externally=self.is_managed_externally, + external_url=self.external_url, ) session.add(new_dataset) @@ -2399,7 +2390,9 @@ def write_shadow_dataset( # pylint: disable=too-many-locals sa.event.listen(SqlaTable, "after_delete", SqlaTable.after_delete) sa.event.listen(SqlaTable, "after_update", SqlaTable.after_update) sa.event.listen(SqlMetric, "after_update", SqlaTable.update_table) +sa.event.listen(SqlMetric, "after_delete", SqlMetric.after_delete) sa.event.listen(TableColumn, "after_update", SqlaTable.update_table) +sa.event.listen(TableColumn, "after_delete", TableColumn.after_delete) RLSFilterRoles = Table( "rls_filter_roles", diff --git a/superset/connectors/sqla/utils.py b/superset/connectors/sqla/utils.py index 
f8ed7a956704a..1786c5bf17169 100644 --- a/superset/connectors/sqla/utils.py +++ b/superset/connectors/sqla/utils.py @@ -15,16 +15,28 @@ # specific language governing permissions and limitations # under the License. from contextlib import closing -from typing import Any, Callable, Dict, List, Optional, Set, TYPE_CHECKING +from typing import ( + Any, + Callable, + Dict, + Iterable, + Iterator, + List, + Optional, + Type, + TYPE_CHECKING, + TypeVar, +) +from uuid import UUID import sqlparse from flask_babel import lazy_gettext as _ -from sqlalchemy import and_, or_ +from sqlalchemy.engine.url import URL as SqlaURL from sqlalchemy.exc import NoSuchTableError +from sqlalchemy.ext.declarative import DeclarativeMeta from sqlalchemy.orm import Session from sqlalchemy.sql.type_api import TypeEngine -from superset.columns.models import Column as NewColumn from superset.errors import ErrorLevel, SupersetError, SupersetErrorType from superset.exceptions import ( SupersetGenericDBErrorException, @@ -32,9 +44,9 @@ ) from superset.models.core import Database from superset.result_set import SupersetResultSet -from superset.sql_parse import has_table_query, insert_rls, ParsedQuery, Table +from superset.sql_parse import has_table_query, insert_rls, ParsedQuery from superset.superset_typing import ResultSetColumnType -from superset.tables.models import Table as NewTable +from superset.utils.memoized import memoized if TYPE_CHECKING: from superset.connectors.sqla.models import SqlaTable @@ -168,75 +180,38 @@ def validate_adhoc_subquery( return ";\n".join(str(statement) for statement in statements) -def load_or_create_tables( # pylint: disable=too-many-arguments +@memoized +def get_dialect_name(drivername: str) -> str: + return SqlaURL(drivername).get_dialect().name + + +@memoized +def get_identifier_quoter(drivername: str) -> Callable[[str], str]: + return SqlaURL(drivername).get_dialect()().identifier_preparer.quote + + +DeclarativeModel = TypeVar("DeclarativeModel", bound=DeclarativeMeta) + + +def find_cached_objects_in_session( session: Session, - database: Database, - default_schema: Optional[str], - tables: Set[Table], - conditional_quote: Callable[[str], str], -) -> List[NewTable]: + cls: Type[DeclarativeModel], + ids: Optional[Iterable[int]] = None, + uuids: Optional[Iterable[UUID]] = None, +) -> Iterator[DeclarativeModel]: + """Find known ORM instances in cached SQLA session states. + + :param session: a SQLA session + :param cls: a SQLA DeclarativeModel + :param ids: ids of the desired model instances (optional) + :param uuids: uuids of the desired instances, will be ignored if `ids` is provided """ - Load or create new table model instances. 
- """ - if not tables: - return [] - - # set the default schema in tables that don't have it - if default_schema: - fixed_tables = list(tables) - for i, table in enumerate(fixed_tables): - if table.schema is None: - fixed_tables[i] = Table(table.table, default_schema, table.catalog) - tables = set(fixed_tables) - - # load existing tables - predicate = or_( - *[ - and_( - NewTable.database_id == database.id, - NewTable.schema == table.schema, - NewTable.name == table.table, - ) - for table in tables - ] + if not ids and not uuids: + return iter([]) + uuids = uuids or [] + return ( + item + # `session` is an iterator of all known items + for item in set(session) + if isinstance(item, cls) and (item.id in ids if ids else item.uuid in uuids) ) - new_tables = session.query(NewTable).filter(predicate).all() - - # add missing tables - existing = {(table.schema, table.name) for table in new_tables} - for table in tables: - if (table.schema, table.table) not in existing: - try: - column_metadata = get_physical_table_metadata( - database=database, - table_name=table.table, - schema_name=table.schema, - ) - except Exception: # pylint: disable=broad-except - continue - columns = [ - NewColumn( - name=column["name"], - type=str(column["type"]), - expression=conditional_quote(column["name"]), - is_temporal=column["is_dttm"], - is_aggregation=False, - is_physical=True, - is_spatial=False, - is_partition=False, - is_increase_desired=True, - ) - for column in column_metadata - ] - new_tables.append( - NewTable( - name=table.table, - schema=table.schema, - catalog=None, - database_id=database.id, - columns=columns, - ) - ) - existing.add((table.schema, table.table)) - - return new_tables diff --git a/superset/constants.py b/superset/constants.py index 2269bdc7b1362..8399aa457a882 100644 --- a/superset/constants.py +++ b/superset/constants.py @@ -100,7 +100,6 @@ class RouteMethod: # pylint: disable=too-few-public-methods MODEL_API_RW_METHOD_PERMISSION_MAP = { "bulk_delete": "write", - "created_by_me": "read", "delete": "write", "distinct": "read", "get": "read", diff --git a/superset/dao/base.py b/superset/dao/base.py index 607967e3041e2..0090c4e535e23 100644 --- a/superset/dao/base.py +++ b/superset/dao/base.py @@ -175,7 +175,7 @@ def update( def delete(cls, model: Model, commit: bool = True) -> Model: """ Generic delete a model - :raises: DAOCreateFailedError + :raises: DAODeleteFailedError """ try: db.session.delete(model) diff --git a/superset/dashboards/api.py b/superset/dashboards/api.py index fb9c36ca033b8..5e3f78a9536ae 100644 --- a/superset/dashboards/api.py +++ b/superset/dashboards/api.py @@ -140,7 +140,6 @@ def ensure_thumbnails_enabled(self) -> Optional[Response]: "set_embedded", "delete_embedded", "thumbnail", - "created_by_me", } resource_name = "dashboard" allow_browser_login = True @@ -1191,5 +1190,6 @@ def delete_embedded(self, dashboard: Dashboard) -> Response: 500: $ref: '#/components/responses/500' """ - dashboard.embedded = [] + for embedded in dashboard.embedded: + DashboardDAO.delete(embedded) return self.response(200, message="OK") diff --git a/superset/dashboards/filters.py b/superset/dashboards/filters.py index 3bbef14f4cb0e..7b02c23679540 100644 --- a/superset/dashboards/filters.py +++ b/superset/dashboards/filters.py @@ -224,12 +224,14 @@ def apply(self, query: Query, value: Any) -> Query: return query.filter( and_( Dashboard.certified_by.isnot(None), + Dashboard.certified_by != "", ) ) if value is False: return query.filter( - and_( + or_( Dashboard.certified_by.is_(None), + 
Dashboard.certified_by == "", ) ) return query diff --git a/superset/databases/api.py b/superset/databases/api.py index e5817afb5d13b..ac497bf67dbde 100644 --- a/superset/databases/api.py +++ b/superset/databases/api.py @@ -169,10 +169,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi): edit_columns = add_columns - search_filters = { - "allow_file_upload": [DatabaseUploadEnabledFilter], - "expose_in_sqllab": [DatabaseFilter], - } + search_filters = {"allow_file_upload": [DatabaseUploadEnabledFilter]} list_select_columns = list_columns + ["extra", "sqlalchemy_uri", "password"] order_columns = [ diff --git a/superset/databases/filters.py b/superset/databases/filters.py index 228abbc3bfa81..86564e8f15a7e 100644 --- a/superset/databases/filters.py +++ b/superset/databases/filters.py @@ -69,8 +69,6 @@ class DatabaseUploadEnabledFilter(BaseFilter): # pylint: disable=too-few-public def apply(self, query: Query, value: Any) -> Query: filtered_query = query.filter(Database.allow_file_upload) - database_perms = security_manager.user_view_menu_names("database_access") - schema_access_databases = can_access_databases("schema_access") datasource_access_databases = can_access_databases("datasource_access") if hasattr(g, "user"): @@ -82,19 +80,10 @@ def apply(self, query: Query, value: Any) -> Query: if len(allowed_schemas): return filtered_query - filtered_query = filtered_query.filter( + return filtered_query.filter( or_( cast(Database.extra, JSON)["schemas_allowed_for_file_upload"] is not None, cast(Database.extra, JSON)["schemas_allowed_for_file_upload"] != [], ) ) - - return filtered_query.filter( - or_( - self.model.perm.in_(database_perms), - self.model.database_name.in_( - [*schema_access_databases, *datasource_access_databases] - ), - ) - ) diff --git a/superset/datasets/models.py b/superset/datasets/models.py index 56a6fbf4000e3..b433709f2c779 100644 --- a/superset/datasets/models.py +++ b/superset/datasets/models.py @@ -28,9 +28,11 @@ import sqlalchemy as sa from flask_appbuilder import Model -from sqlalchemy.orm import relationship +from sqlalchemy.orm import backref, relationship +from superset import security_manager from superset.columns.models import Column +from superset.models.core import Database from superset.models.helpers import ( AuditMixinNullable, ExtraJSONMixin, @@ -38,18 +40,33 @@ ) from superset.tables.models import Table -column_association_table = sa.Table( +dataset_column_association_table = sa.Table( "sl_dataset_columns", Model.metadata, # pylint: disable=no-member - sa.Column("dataset_id", sa.ForeignKey("sl_datasets.id")), - sa.Column("column_id", sa.ForeignKey("sl_columns.id")), + sa.Column( + "dataset_id", + sa.ForeignKey("sl_datasets.id"), + primary_key=True, + ), + sa.Column( + "column_id", + sa.ForeignKey("sl_columns.id"), + primary_key=True, + ), ) -table_association_table = sa.Table( +dataset_table_association_table = sa.Table( "sl_dataset_tables", Model.metadata, # pylint: disable=no-member - sa.Column("dataset_id", sa.ForeignKey("sl_datasets.id")), - sa.Column("table_id", sa.ForeignKey("sl_tables.id")), + sa.Column("dataset_id", sa.ForeignKey("sl_datasets.id"), primary_key=True), + sa.Column("table_id", sa.ForeignKey("sl_tables.id"), primary_key=True), +) + +dataset_user_association_table = sa.Table( + "sl_dataset_users", + Model.metadata, # pylint: disable=no-member + sa.Column("dataset_id", sa.ForeignKey("sl_datasets.id"), primary_key=True), + sa.Column("user_id", sa.ForeignKey("ab_user.id"), primary_key=True), ) @@ -61,10 +78,34 @@ class Dataset(Model, 
AuditMixinNullable, ExtraJSONMixin, ImportExportMixin): __tablename__ = "sl_datasets" id = sa.Column(sa.Integer, primary_key=True) + database_id = sa.Column(sa.Integer, sa.ForeignKey("dbs.id"), nullable=False) + database: Database = relationship( + "Database", + backref=backref("datasets", cascade="all, delete-orphan"), + foreign_keys=[database_id], + ) + # The relationship between datasets and columns is 1:n, but we use a + # many-to-many association table to avoid adding two mutually exclusive + # columns (dataset_id and table_id) to Column + columns: List[Column] = relationship( + "Column", + secondary=dataset_column_association_table, + cascade="all, delete-orphan", + single_parent=True, + backref="datasets", + ) + owners = relationship( + security_manager.user_model, secondary=dataset_user_association_table + ) + tables: List[Table] = relationship( + "Table", secondary=dataset_table_association_table, backref="datasets" + ) + + # Does the dataset point directly to a ``Table``? + is_physical = sa.Column(sa.Boolean, default=False) - # A temporary column, used for shadow writing to the new model. Once the ``SqlaTable`` - # model has been deleted this column can be removed. - sqlatable_id = sa.Column(sa.Integer, nullable=True, unique=True) + # Column is managed externally and should be read-only inside Superset + is_managed_externally = sa.Column(sa.Boolean, nullable=False, default=False) # We use ``sa.Text`` for these attributes because (1) in modern databases the # performance is the same as ``VARCHAR``[1] and (2) because some table names can be # **really** long! # # [1] https://www.postgresql.org/docs/9.1/datatype-character.html name = sa.Column(sa.Text) - expression = sa.Column(sa.Text) - - # n:n relationship - tables: List[Table] = relationship("Table", secondary=table_association_table) - - # The relationship between datasets and columns is 1:n, but we use a many-to-many - # association to differentiate between the relationship between tables and columns. - columns: List[Column] = relationship( - "Column", secondary=column_association_table, cascade="all, delete" - ) - - # Does the dataset point directly to a ``Table``? - is_physical = sa.Column(sa.Boolean, default=False) - - # Column is managed externally and should be read-only inside Superset - is_managed_externally = sa.Column(sa.Boolean, nullable=False, default=False) external_url = sa.Column(sa.Text, nullable=True) + + def __repr__(self) -> str: + return f"&lt;Dataset id={self.id} database_id={self.database_id} name={self.name}&gt;" diff --git a/superset/db_engine_specs/gsheets.py b/superset/db_engine_specs/gsheets.py index 888513f518482..94c4cf424b0b3 100644 --- a/superset/db_engine_specs/gsheets.py +++ b/superset/db_engine_specs/gsheets.py @@ -216,7 +216,11 @@ def validate_parameters( except Exception: # pylint: disable=broad-except errors.append( SupersetError( - message="URL could not be identified", + message=( + "The URL could not be identified. Please check for typos " + "and make sure that ‘Type of Google Sheets allowed’ " + "selection matches the input." 
+ ), error_type=SupersetErrorType.TABLE_DOES_NOT_EXIST_ERROR, level=ErrorLevel.WARNING, extra={"catalog": {"idx": idx, "url": True}}, diff --git a/superset/db_engine_specs/pinot.py b/superset/db_engine_specs/pinot.py index 051f42501f929..38e30accecbc0 100644 --- a/superset/db_engine_specs/pinot.py +++ b/superset/db_engine_specs/pinot.py @@ -33,6 +33,10 @@ class PinotEngineSpec(BaseEngineSpec): # pylint: disable=abstract-method _time_grain_expressions: Dict[Optional[str], str] = { "PT1S": "1:SECONDS", "PT1M": "1:MINUTES", + "PT5M": "5:MINUTES", + "PT10M": "10:MINUTES", + "PT15M": "15:MINUTES", + "PT30M": "30:MINUTES", "PT1H": "1:HOURS", "P1D": "1:DAYS", "P1W": "week", @@ -53,6 +57,10 @@ class PinotEngineSpec(BaseEngineSpec): # pylint: disable=abstract-method _use_date_trunc_function: Dict[str, bool] = { "PT1S": False, "PT1M": False, + "PT5M": False, + "PT10M": False, + "PT15M": False, + "PT30M": False, "PT1H": False, "P1D": False, "P1W": True, diff --git a/superset/db_engine_specs/presto.py b/superset/db_engine_specs/presto.py index 8675607848328..645dd32d26424 100644 --- a/superset/db_engine_specs/presto.py +++ b/superset/db_engine_specs/presto.py @@ -47,9 +47,11 @@ from superset.models.sql_lab import Query from superset.models.sql_types.presto_sql_types import ( Array, + Date, Interval, Map, Row, + TimeStamp, TinyInteger, ) from superset.result_set import destringify @@ -1096,10 +1098,18 @@ def where_latest_partition( # pylint: disable=too-many-arguments if values is None: return None - column_names = {column.get("name") for column in columns or []} + column_type_by_name = { + column.get("name"): column.get("type") for column in columns or [] + } + for col_name, value in zip(col_names, values): - if col_name in column_names: - query = query.where(Column(col_name) == value) + if col_name in column_type_by_name: + if column_type_by_name.get(col_name) == "TIMESTAMP": + query = query.where(Column(col_name, TimeStamp()) == value) + elif column_type_by_name.get(col_name) == "DATE": + query = query.where(Column(col_name, Date()) == value) + else: + query = query.where(Column(col_name) == value) return query @classmethod diff --git a/tests/unit_tests/migrations/__init__.py b/superset/embedded_dashboard/__init__.py similarity index 100% rename from tests/unit_tests/migrations/__init__.py rename to superset/embedded_dashboard/__init__.py diff --git a/tests/unit_tests/migrations/shared/__init__.py b/superset/embedded_dashboard/commands/__init__.py similarity index 100% rename from tests/unit_tests/migrations/shared/__init__.py rename to superset/embedded_dashboard/commands/__init__.py diff --git a/superset/examples/birth_names.py b/superset/examples/birth_names.py index 1380958b2ad4a..8d7c02799dd57 100644 --- a/superset/examples/birth_names.py +++ b/superset/examples/birth_names.py @@ -135,23 +135,26 @@ def _set_table_metadata(datasource: SqlaTable, database: "Database") -> None: def _add_table_metrics(datasource: SqlaTable) -> None: - if not any(col.column_name == "num_california" for col in datasource.columns): + # By accessing the attribute first, we make sure `datasource.columns` and + # `datasource.metrics` are already loaded. Otherwise accessing them later + # may trigger an unnecessary and unexpected `after_update` event. 
+ columns, metrics = datasource.columns, datasource.metrics + + if not any(col.column_name == "num_california" for col in columns): col_state = str(column("state").compile(db.engine)) col_num = str(column("num").compile(db.engine)) - datasource.columns.append( + columns.append( TableColumn( column_name="num_california", expression=f"CASE WHEN {col_state} = 'CA' THEN {col_num} ELSE 0 END", ) ) - if not any(col.metric_name == "sum__num" for col in datasource.metrics): + if not any(col.metric_name == "sum__num" for col in metrics): col = str(column("num").compile(db.engine)) - datasource.metrics.append( - SqlMetric(metric_name="sum__num", expression=f"SUM({col})") - ) + metrics.append(SqlMetric(metric_name="sum__num", expression=f"SUM({col})")) - for col in datasource.columns: + for col in columns: if col.column_name == "ds": col.is_dttm = True break diff --git a/superset/key_value/shared_entries.py b/superset/key_value/shared_entries.py index 5dda89a7b3163..5f4ded949808c 100644 --- a/superset/key_value/shared_entries.py +++ b/superset/key_value/shared_entries.py @@ -20,7 +20,6 @@ from superset.key_value.types import KeyValueResource, SharedKey from superset.key_value.utils import get_uuid_namespace, random_key -from superset.utils.memoized import memoized RESOURCE = KeyValueResource.APP NAMESPACE = get_uuid_namespace("") @@ -42,7 +41,6 @@ def set_shared_value(key: SharedKey, value: Any) -> None: CreateKeyValueCommand(resource=RESOURCE, value=value, key=uuid_key).run() -@memoized def get_permalink_salt(key: SharedKey) -> str: salt = get_shared_value(key) if salt is None: diff --git a/superset/migrations/shared/utils.py b/superset/migrations/shared/utils.py index c54de83c42af0..4b0c4e1440dd5 100644 --- a/superset/migrations/shared/utils.py +++ b/superset/migrations/shared/utils.py @@ -15,42 +15,22 @@ # specific language governing permissions and limitations # under the License. import logging -from typing import Any, Iterator, Optional, Set +import os +import time +from typing import Any +from uuid import uuid4 from alembic import op from sqlalchemy import engine_from_config +from sqlalchemy.dialects.mysql.base import MySQLDialect +from sqlalchemy.dialects.postgresql.base import PGDialect from sqlalchemy.engine import reflection from sqlalchemy.exc import NoSuchTableError +from sqlalchemy.orm import Session -try: - from sqloxide import parse_sql -except ImportError: - parse_sql = None +logger = logging.getLogger(__name__) -from superset.sql_parse import ParsedQuery, Table - -logger = logging.getLogger("alembic") - - -# mapping between sqloxide and SQLAlchemy dialects -sqloxide_dialects = { - "ansi": {"trino", "trinonative", "presto"}, - "hive": {"hive", "databricks"}, - "ms": {"mssql"}, - "mysql": {"mysql"}, - "postgres": { - "cockroachdb", - "hana", - "netezza", - "postgres", - "postgresql", - "redshift", - "vertica", - }, - "snowflake": {"snowflake"}, - "sqlite": {"sqlite", "gsheets", "shillelagh"}, - "clickhouse": {"clickhouse"}, -} +DEFAULT_BATCH_SIZE = int(os.environ.get("BATCH_SIZE", 1000)) def table_has_column(table: str, column: str) -> bool: @@ -61,7 +41,6 @@ def table_has_column(table: str, column: str) -> bool: :param column: A column name :returns: True iff the column exists in the table """ - config = op.get_context().config engine = engine_from_config( config.get_section(config.config_ini_section), prefix="sqlalchemy." 
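Stepping back to the `presto.py` hunk earlier in this diff: `where_latest_partition` now attaches explicit `Date`/`TimeStamp` types to partition columns so the comparison value is bound with type-aware semantics rather than as a bare string. A short illustration with stock SQLAlchemy types (Superset's Presto-specific types build their literal rendering on the same mechanism):

```python
# Sketch: an explicit type on a lightweight column() changes how the
# comparison value is bound. Stock sqlalchemy types are used here for
# illustration; superset's presto_sql_types.Date/TimeStamp rely on the
# same coercion to emit properly typed literals.
from datetime import date

import sqlalchemy as sa
from sqlalchemy import column

# untyped column: the bind parameter falls back to generic handling
# inferred from the Python value (a plain string)
untyped = column("ds") == "2022-01-01"
print(type(untyped.right.type).__name__)  # String

# typed column: the bound value inherits Date semantics, letting the
# dialect render a proper typed literal (e.g. DATE '2022-01-01' on Presto)
typed = column("ds", sa.Date()) == date(2022, 1, 1)
print(type(typed.right.type).__name__)  # Date
```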
@@ -73,42 +52,44 @@ def table_has_column(table: str, column: str) -> bool: return False -def find_nodes_by_key(element: Any, target: str) -> Iterator[Any]: - """ - Find all nodes in a SQL tree matching a given key. - """ - if isinstance(element, list): - for child in element: - yield from find_nodes_by_key(child, target) - elif isinstance(element, dict): - for key, value in element.items(): - if key == target: - yield value - else: - yield from find_nodes_by_key(value, target) - - -def extract_table_references(sql_text: str, sqla_dialect: str) -> Set[Table]: - """ - Return all the dependencies from a SQL sql_text. - """ - if not parse_sql: - parsed = ParsedQuery(sql_text) - return parsed.tables +uuid_by_dialect = { + MySQLDialect: "UNHEX(REPLACE(CONVERT(UUID() using utf8mb4), '-', ''))", + PGDialect: "uuid_in(md5(random()::text || clock_timestamp()::text)::cstring)", +} - dialect = "generic" - for dialect, sqla_dialects in sqloxide_dialects.items(): - if sqla_dialect in sqla_dialects: - break - try: - tree = parse_sql(sql_text, dialect=dialect) - except Exception: # pylint: disable=broad-except - logger.warning("Unable to parse query with sqloxide: %s", sql_text) - # fallback to sqlparse - parsed = ParsedQuery(sql_text) - return parsed.tables - return { - Table(*[part["value"] for part in table["name"][::-1]]) - for table in find_nodes_by_key(tree, "Table") - } +def assign_uuids( + model: Any, session: Session, batch_size: int = DEFAULT_BATCH_SIZE +) -> None: + """Generate new UUIDs for all rows in a table""" + bind = op.get_bind() + table_name = model.__tablename__ + count = session.query(model).count() + # silently skip if the table is empty (suitable for db initialization) + if count == 0: + return + + start_time = time.time() + print(f"\nAdding uuids for `{table_name}`...") + # Use dialect-specific native SQL queries if possible + for dialect, sql in uuid_by_dialect.items(): + if isinstance(bind.dialect, dialect): + op.execute( + f"UPDATE {dialect().identifier_preparer.quote(table_name)} SET uuid = {sql}" + ) + print(f"Done. Assigned {count} uuids in {time.time() - start_time:.3f}s.\n") + return + + # Otherwise use the Python uuid function + start = 0 + while start < count: + end = min(start + batch_size, count) + for obj in session.query(model)[start:end]: + obj.uuid = uuid4() + session.merge(obj) + session.commit() + if start + batch_size < count: + print(f" uuid assigned to {end} out of {count}\r", end="") + start += batch_size + + print(f"Done. 
Assigned {count} uuids in {time.time() - start_time:.3f}s.\n") diff --git a/superset/migrations/versions/2ed890b36b94_rm_time_range_endpoints_from_qc.py b/superset/migrations/versions/2ed890b36b94_rm_time_range_endpoints_from_qc.py index 42d73bc33d335..e4e4718a41173 100644 --- a/superset/migrations/versions/2ed890b36b94_rm_time_range_endpoints_from_qc.py +++ b/superset/migrations/versions/2ed890b36b94_rm_time_range_endpoints_from_qc.py @@ -26,40 +26,9 @@ revision = "2ed890b36b94" down_revision = "58df9d617f14" -import json - -import sqlalchemy as sa -from alembic import op -from sqlalchemy.ext.declarative import declarative_base - -from superset import db - -Base = declarative_base() - - -class Slice(Base): - __tablename__ = "slices" - id = sa.Column(sa.Integer, primary_key=True) - query_context = sa.Column(sa.Text) - def upgrade(): - bind = op.get_bind() - session = db.Session(bind=bind) - for slc in session.query(Slice).filter( - Slice.query_context.like("%time_range_endpoints%") - ): - try: - query_context = json.loads(slc.query_context) - except json.decoder.JSONDecodeError: - continue - queries = query_context.get("queries") - for query in queries: - query.get("extras", {}).pop("time_range_endpoints", None) - slc.queries = json.dumps(queries) - - session.commit() - session.close() + pass def downgrade(): diff --git a/superset/migrations/versions/6766938c6065_add_key_value_store.py b/superset/migrations/versions/6766938c6065_add_key_value_store.py index 0a756386aee98..26b1d28e0d49b 100644 --- a/superset/migrations/versions/6766938c6065_add_key_value_store.py +++ b/superset/migrations/versions/6766938c6065_add_key_value_store.py @@ -38,7 +38,7 @@ def upgrade(): "key_value", sa.Column("id", sa.Integer(), nullable=False), sa.Column("resource", sa.String(32), nullable=False), - sa.Column("value", sa.LargeBinary(), nullable=False), + sa.Column("value", sa.LargeBinary(length=2**31), nullable=False), sa.Column("uuid", UUIDType(binary=True), default=uuid4), sa.Column("created_on", sa.DateTime(), nullable=True), sa.Column("created_by_fk", sa.Integer(), nullable=True), diff --git a/superset/migrations/versions/96e99fb176a0_add_import_mixing_to_saved_query.py b/superset/migrations/versions/96e99fb176a0_add_import_mixing_to_saved_query.py index 57d22aa089aa2..f93deb1d0c950 100644 --- a/superset/migrations/versions/96e99fb176a0_add_import_mixing_to_saved_query.py +++ b/superset/migrations/versions/96e99fb176a0_add_import_mixing_to_saved_query.py @@ -32,9 +32,7 @@ from sqlalchemy_utils import UUIDType from superset import db -from superset.migrations.versions.b56500de1855_add_uuid_column_to_import_mixin import ( - add_uuids, -) +from superset.migrations.shared.utils import assign_uuids # revision identifiers, used by Alembic. revision = "96e99fb176a0" @@ -75,7 +73,7 @@ def upgrade(): # Ignore column update errors so that we can run upgrade multiple times pass - add_uuids(SavedQuery, "saved_query", session) + assign_uuids(SavedQuery, session) try: # Add uniqueness constraint diff --git a/superset/migrations/versions/9d8a8d575284_.py b/superset/migrations/versions/9d8a8d575284_.py index daa84a2ad0647..fbbfac231b0e8 100644 --- a/superset/migrations/versions/9d8a8d575284_.py +++ b/superset/migrations/versions/9d8a8d575284_.py @@ -14,7 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-"""empty message +"""merge point Revision ID: 9d8a8d575284 Revises: ('8b841273bec3', 'b0d0249074e4') diff --git a/superset/migrations/versions/a9422eeaae74_new_dataset_models_take_2.py b/superset/migrations/versions/a9422eeaae74_new_dataset_models_take_2.py new file mode 100644 index 0000000000000..9a2498bfa8590 --- /dev/null +++ b/superset/migrations/versions/a9422eeaae74_new_dataset_models_take_2.py @@ -0,0 +1,902 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""new_dataset_models_take_2 + +Revision ID: a9422eeaae74 +Revises: ad07e4fdbaba +Create Date: 2022-04-01 14:38:09.499483 + +""" + +# revision identifiers, used by Alembic. +revision = "a9422eeaae74" +down_revision = "ad07e4fdbaba" + +import json +import os +from datetime import datetime +from typing import List, Optional, Set, Type, Union +from uuid import uuid4 + +import sqlalchemy as sa +from alembic import op +from sqlalchemy import select +from sqlalchemy.ext.declarative import declarative_base, declared_attr +from sqlalchemy.orm import backref, relationship, Session +from sqlalchemy.schema import UniqueConstraint +from sqlalchemy.sql import functions as func +from sqlalchemy.sql.expression import and_, or_ +from sqlalchemy_utils import UUIDType + +from superset.connectors.sqla.models import ADDITIVE_METRIC_TYPES_LOWER +from superset.connectors.sqla.utils import get_dialect_name, get_identifier_quoter +from superset.extensions import encrypted_field_factory +from superset.migrations.shared.utils import assign_uuids +from superset.sql_parse import extract_table_references, Table +from superset.utils.core import MediumText + +Base = declarative_base() +SHOW_PROGRESS = os.environ.get("SHOW_PROGRESS") == "1" +UNKNOWN_TYPE = "UNKNOWN" + + +user_table = sa.Table( + "ab_user", Base.metadata, sa.Column("id", sa.Integer(), primary_key=True) +) + + +class UUIDMixin: + uuid = sa.Column( + UUIDType(binary=True), primary_key=False, unique=True, default=uuid4 + ) + + +class AuxiliaryColumnsMixin(UUIDMixin): + """ + Auxiliary columns, a combination of columns added by + AuditMixinNullable + ImportExportMixin + """ + + created_on = sa.Column(sa.DateTime, default=datetime.now, nullable=True) + changed_on = sa.Column( + sa.DateTime, default=datetime.now, onupdate=datetime.now, nullable=True + ) + + @declared_attr + def created_by_fk(cls): + return sa.Column(sa.Integer, sa.ForeignKey("ab_user.id"), nullable=True) + + @declared_attr + def changed_by_fk(cls): + return sa.Column(sa.Integer, sa.ForeignKey("ab_user.id"), nullable=True) + + +def insert_from_select( + target: Union[str, sa.Table, Type[Base]], source: sa.sql.expression.Select +) -> None: + """ + Execute INSERT FROM SELECT to copy data from a SELECT query to the target table. 
+ """ + if isinstance(target, sa.Table): + target_table = target + elif hasattr(target, "__tablename__"): + target_table: sa.Table = Base.metadata.tables[target.__tablename__] + else: + target_table: sa.Table = Base.metadata.tables[target] + cols = [col.name for col in source.columns if col.name in target_table.columns] + query = target_table.insert().from_select(cols, source) + return op.execute(query) + + +class Database(Base): + + __tablename__ = "dbs" + __table_args__ = (UniqueConstraint("database_name"),) + + id = sa.Column(sa.Integer, primary_key=True) + database_name = sa.Column(sa.String(250), unique=True, nullable=False) + sqlalchemy_uri = sa.Column(sa.String(1024), nullable=False) + password = sa.Column(encrypted_field_factory.create(sa.String(1024))) + impersonate_user = sa.Column(sa.Boolean, default=False) + encrypted_extra = sa.Column(encrypted_field_factory.create(sa.Text), nullable=True) + extra = sa.Column(sa.Text) + server_cert = sa.Column(encrypted_field_factory.create(sa.Text), nullable=True) + + +class TableColumn(AuxiliaryColumnsMixin, Base): + + __tablename__ = "table_columns" + __table_args__ = (UniqueConstraint("table_id", "column_name"),) + + id = sa.Column(sa.Integer, primary_key=True) + table_id = sa.Column(sa.Integer, sa.ForeignKey("tables.id")) + is_active = sa.Column(sa.Boolean, default=True) + extra = sa.Column(sa.Text) + column_name = sa.Column(sa.String(255), nullable=False) + type = sa.Column(sa.String(32)) + expression = sa.Column(MediumText()) + description = sa.Column(MediumText()) + is_dttm = sa.Column(sa.Boolean, default=False) + filterable = sa.Column(sa.Boolean, default=True) + groupby = sa.Column(sa.Boolean, default=True) + verbose_name = sa.Column(sa.String(1024)) + python_date_format = sa.Column(sa.String(255)) + + +class SqlMetric(AuxiliaryColumnsMixin, Base): + + __tablename__ = "sql_metrics" + __table_args__ = (UniqueConstraint("table_id", "metric_name"),) + + id = sa.Column(sa.Integer, primary_key=True) + table_id = sa.Column(sa.Integer, sa.ForeignKey("tables.id")) + extra = sa.Column(sa.Text) + metric_type = sa.Column(sa.String(32)) + metric_name = sa.Column(sa.String(255), nullable=False) + expression = sa.Column(MediumText(), nullable=False) + warning_text = sa.Column(MediumText()) + description = sa.Column(MediumText()) + d3format = sa.Column(sa.String(128)) + verbose_name = sa.Column(sa.String(1024)) + + +sqlatable_user_table = sa.Table( + "sqlatable_user", + Base.metadata, + sa.Column("id", sa.Integer, primary_key=True), + sa.Column("user_id", sa.Integer, sa.ForeignKey("ab_user.id")), + sa.Column("table_id", sa.Integer, sa.ForeignKey("tables.id")), +) + + +class SqlaTable(AuxiliaryColumnsMixin, Base): + + __tablename__ = "tables" + __table_args__ = (UniqueConstraint("database_id", "schema", "table_name"),) + + id = sa.Column(sa.Integer, primary_key=True) + extra = sa.Column(sa.Text) + database_id = sa.Column(sa.Integer, sa.ForeignKey("dbs.id"), nullable=False) + database: Database = relationship( + "Database", + backref=backref("tables", cascade="all, delete-orphan"), + foreign_keys=[database_id], + ) + schema = sa.Column(sa.String(255)) + table_name = sa.Column(sa.String(250), nullable=False) + sql = sa.Column(MediumText()) + is_managed_externally = sa.Column(sa.Boolean, nullable=False, default=False) + external_url = sa.Column(sa.Text, nullable=True) + + +table_column_association_table = sa.Table( + "sl_table_columns", + Base.metadata, + sa.Column("table_id", sa.ForeignKey("sl_tables.id"), primary_key=True), + sa.Column("column_id", 
sa.ForeignKey("sl_columns.id"), primary_key=True), +) + +dataset_column_association_table = sa.Table( + "sl_dataset_columns", + Base.metadata, + sa.Column("dataset_id", sa.ForeignKey("sl_datasets.id"), primary_key=True), + sa.Column("column_id", sa.ForeignKey("sl_columns.id"), primary_key=True), +) + +dataset_table_association_table = sa.Table( + "sl_dataset_tables", + Base.metadata, + sa.Column("dataset_id", sa.ForeignKey("sl_datasets.id"), primary_key=True), + sa.Column("table_id", sa.ForeignKey("sl_tables.id"), primary_key=True), +) + +dataset_user_association_table = sa.Table( + "sl_dataset_users", + Base.metadata, + sa.Column("dataset_id", sa.ForeignKey("sl_datasets.id"), primary_key=True), + sa.Column("user_id", sa.ForeignKey("ab_user.id"), primary_key=True), +) + + +class NewColumn(AuxiliaryColumnsMixin, Base): + + __tablename__ = "sl_columns" + + id = sa.Column(sa.Integer, primary_key=True) + # A temporary column to link physical columns with tables so we don't + # have to insert a record in the relationship table while creating new columns. + table_id = sa.Column(sa.Integer, nullable=True) + + is_aggregation = sa.Column(sa.Boolean, nullable=False, default=False) + is_additive = sa.Column(sa.Boolean, nullable=False, default=False) + is_dimensional = sa.Column(sa.Boolean, nullable=False, default=False) + is_filterable = sa.Column(sa.Boolean, nullable=False, default=True) + is_increase_desired = sa.Column(sa.Boolean, nullable=False, default=True) + is_managed_externally = sa.Column(sa.Boolean, nullable=False, default=False) + is_partition = sa.Column(sa.Boolean, nullable=False, default=False) + is_physical = sa.Column(sa.Boolean, nullable=False, default=False) + is_temporal = sa.Column(sa.Boolean, nullable=False, default=False) + is_spatial = sa.Column(sa.Boolean, nullable=False, default=False) + + name = sa.Column(sa.Text) + type = sa.Column(sa.Text) + unit = sa.Column(sa.Text) + expression = sa.Column(MediumText()) + description = sa.Column(MediumText()) + warning_text = sa.Column(MediumText()) + external_url = sa.Column(sa.Text, nullable=True) + extra_json = sa.Column(MediumText(), default="{}") + + +class NewTable(AuxiliaryColumnsMixin, Base): + + __tablename__ = "sl_tables" + + id = sa.Column(sa.Integer, primary_key=True) + # A temporary column to keep the link between NewTable to SqlaTable + sqlatable_id = sa.Column(sa.Integer, primary_key=False, nullable=True, unique=True) + database_id = sa.Column(sa.Integer, sa.ForeignKey("dbs.id"), nullable=False) + is_managed_externally = sa.Column(sa.Boolean, nullable=False, default=False) + catalog = sa.Column(sa.Text) + schema = sa.Column(sa.Text) + name = sa.Column(sa.Text) + external_url = sa.Column(sa.Text, nullable=True) + extra_json = sa.Column(MediumText(), default="{}") + database: Database = relationship( + "Database", + backref=backref("new_tables", cascade="all, delete-orphan"), + foreign_keys=[database_id], + ) + + +class NewDataset(Base, AuxiliaryColumnsMixin): + + __tablename__ = "sl_datasets" + + id = sa.Column(sa.Integer, primary_key=True) + database_id = sa.Column(sa.Integer, sa.ForeignKey("dbs.id"), nullable=False) + is_physical = sa.Column(sa.Boolean, default=False) + is_managed_externally = sa.Column(sa.Boolean, nullable=False, default=False) + name = sa.Column(sa.Text) + expression = sa.Column(MediumText()) + external_url = sa.Column(sa.Text, nullable=True) + extra_json = sa.Column(MediumText(), default="{}") + + +def find_tables( + session: Session, + database_id: int, + default_schema: Optional[str], + tables: 
+) -> List[int]:
+    """
+    Look up NewTable records belonging to a specific database
+    """
+    if not tables:
+        return []
+
+    predicate = or_(
+        *[
+            and_(
+                NewTable.database_id == database_id,
+                NewTable.schema == (table.schema or default_schema),
+                NewTable.name == table.table,
+            )
+            for table in tables
+        ]
+    )
+    return session.query(NewTable.id).filter(predicate).all()
+
+
+# helper SQLA elements for easier querying
+is_physical_table = or_(SqlaTable.sql.is_(None), SqlaTable.sql == "")
+is_physical_column = or_(TableColumn.expression.is_(None), TableColumn.expression == "")
+
+# keep only table columns that have a valid associated SqlaTable
+active_table_columns = sa.join(
+    TableColumn,
+    SqlaTable,
+    TableColumn.table_id == SqlaTable.id,
+)
+active_metrics = sa.join(SqlMetric, SqlaTable, SqlMetric.table_id == SqlaTable.id)
+
+
+def copy_tables(session: Session) -> None:
+    """Copy physical tables"""
+    count = session.query(SqlaTable).filter(is_physical_table).count()
+    if not count:
+        return
+    print(f">> Copy {count:,} physical tables to sl_tables...")
+    insert_from_select(
+        NewTable,
+        select(
+            [
+                # Tables need a different uuid than datasets, since they are
+                # different entities. When doing INSERT FROM SELECT we must provide
+                # a value for `uuid`, otherwise it would use the default generated
+                # on the Python side, which would cause duplicate values. They will
+                # be replaced by `assign_uuids` later.
+                SqlaTable.uuid,
+                SqlaTable.id.label("sqlatable_id"),
+                SqlaTable.created_on,
+                SqlaTable.changed_on,
+                SqlaTable.created_by_fk,
+                SqlaTable.changed_by_fk,
+                SqlaTable.table_name.label("name"),
+                SqlaTable.schema,
+                SqlaTable.database_id,
+                SqlaTable.is_managed_externally,
+                SqlaTable.external_url,
+            ]
+        )
+        # use an inner join to keep only tables with valid database ids
+        .select_from(
+            sa.join(SqlaTable, Database, SqlaTable.database_id == Database.id)
+        ).where(is_physical_table),
+    )
+
+
+def copy_datasets(session: Session) -> None:
+    """Copy all datasets"""
+    count = session.query(SqlaTable).count()
+    if not count:
+        return
+    print(f">> Copy {count:,} SqlaTable to sl_datasets...")
+    insert_from_select(
+        NewDataset,
+        select(
+            [
+                SqlaTable.uuid,
+                SqlaTable.created_on,
+                SqlaTable.changed_on,
+                SqlaTable.created_by_fk,
+                SqlaTable.changed_by_fk,
+                SqlaTable.database_id,
+                SqlaTable.table_name.label("name"),
+                func.coalesce(SqlaTable.sql, SqlaTable.table_name).label("expression"),
+                is_physical_table.label("is_physical"),
+                SqlaTable.is_managed_externally,
+                SqlaTable.external_url,
+                SqlaTable.extra.label("extra_json"),
+            ]
+        ),
+    )
+
+    print("  Copy dataset owners...")
+    insert_from_select(
+        dataset_user_association_table,
+        select(
+            [NewDataset.id.label("dataset_id"), sqlatable_user_table.c.user_id]
+        ).select_from(
+            sqlatable_user_table.join(
+                SqlaTable, SqlaTable.id == sqlatable_user_table.c.table_id
+            ).join(NewDataset, NewDataset.uuid == SqlaTable.uuid)
+        ),
+    )
+
+    print("  Link physical datasets with tables...")
+    insert_from_select(
+        dataset_table_association_table,
+        select(
+            [
+                NewDataset.id.label("dataset_id"),
+                NewTable.id.label("table_id"),
+            ]
+        ).select_from(
+            sa.join(SqlaTable, NewTable, NewTable.sqlatable_id == SqlaTable.id).join(
+                NewDataset, NewDataset.uuid == SqlaTable.uuid
+            )
+        ),
+    )
+
+
+def copy_columns(session: Session) -> None:
+    """Copy columns that have an active associated SqlaTable"""
+    count = session.query(TableColumn).select_from(active_table_columns).count()
+    if not count:
+        return
+    print(f">> Copy {count:,} table columns to sl_columns...")
+
insert_from_select( + NewColumn, + select( + [ + TableColumn.uuid, + TableColumn.created_on, + TableColumn.changed_on, + TableColumn.created_by_fk, + TableColumn.changed_by_fk, + TableColumn.groupby.label("is_dimensional"), + TableColumn.filterable.label("is_filterable"), + TableColumn.column_name.label("name"), + TableColumn.description, + func.coalesce(TableColumn.expression, TableColumn.column_name).label( + "expression" + ), + sa.literal(False).label("is_aggregation"), + is_physical_column.label("is_physical"), + func.coalesce(TableColumn.is_dttm, False).label("is_temporal"), + func.coalesce(TableColumn.type, UNKNOWN_TYPE).label("type"), + TableColumn.extra.label("extra_json"), + ] + ).select_from(active_table_columns), + ) + + joined_columns_table = active_table_columns.join( + NewColumn, TableColumn.uuid == NewColumn.uuid + ) + print(" Link all columns to sl_datasets...") + insert_from_select( + dataset_column_association_table, + select( + [ + NewDataset.id.label("dataset_id"), + NewColumn.id.label("column_id"), + ], + ).select_from( + joined_columns_table.join(NewDataset, NewDataset.uuid == SqlaTable.uuid) + ), + ) + + +def copy_metrics(session: Session) -> None: + """Copy metrics as virtual columns""" + metrics_count = session.query(SqlMetric).select_from(active_metrics).count() + if not metrics_count: + return + + print(f">> Copy {metrics_count:,} metrics to sl_columns...") + insert_from_select( + NewColumn, + select( + [ + SqlMetric.uuid, + SqlMetric.created_on, + SqlMetric.changed_on, + SqlMetric.created_by_fk, + SqlMetric.changed_by_fk, + SqlMetric.metric_name.label("name"), + SqlMetric.expression, + SqlMetric.description, + sa.literal(UNKNOWN_TYPE).label("type"), + ( + func.coalesce( + sa.func.lower(SqlMetric.metric_type).in_( + ADDITIVE_METRIC_TYPES_LOWER + ), + sa.literal(False), + ).label("is_additive") + ), + sa.literal(True).label("is_aggregation"), + # metrics are by default not filterable + sa.literal(False).label("is_filterable"), + sa.literal(False).label("is_dimensional"), + sa.literal(False).label("is_physical"), + sa.literal(False).label("is_temporal"), + SqlMetric.extra.label("extra_json"), + SqlMetric.warning_text, + ] + ).select_from(active_metrics), + ) + + print(" Link metric columns to datasets...") + insert_from_select( + dataset_column_association_table, + select( + [ + NewDataset.id.label("dataset_id"), + NewColumn.id.label("column_id"), + ], + ).select_from( + active_metrics.join(NewDataset, NewDataset.uuid == SqlaTable.uuid).join( + NewColumn, NewColumn.uuid == SqlMetric.uuid + ) + ), + ) + + +def postprocess_datasets(session: Session) -> None: + """ + Postprocess datasets after insertion to + - Quote table names for physical datasets (if needed) + - Link referenced tables to virtual datasets + """ + total = session.query(SqlaTable).count() + if not total: + return + + offset = 0 + limit = 10000 + + joined_tables = sa.join( + NewDataset, + SqlaTable, + NewDataset.uuid == SqlaTable.uuid, + ).join( + Database, + Database.id == SqlaTable.database_id, + isouter=True, + ) + assert session.query(func.count()).select_from(joined_tables).scalar() == total + + print(f">> Run postprocessing on {total} datasets") + + update_count = 0 + + def print_update_count(): + if SHOW_PROGRESS: + print( + f" Will update {update_count} datasets" + " " * 20, + end="\r", + ) + + while offset < total: + print( + f" Process dataset {offset + 1}~{min(total, offset + limit)}..." 
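+            # pad with trailing spaces so this message fully overwrites the
+            # shorter progress line previously printed with end="\r"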
+ + " " * 30 + ) + for ( + database_id, + dataset_id, + expression, + extra, + is_physical, + schema, + sqlalchemy_uri, + ) in session.execute( + select( + [ + NewDataset.database_id, + NewDataset.id.label("dataset_id"), + NewDataset.expression, + SqlaTable.extra, + NewDataset.is_physical, + SqlaTable.schema, + Database.sqlalchemy_uri, + ] + ) + .select_from(joined_tables) + .offset(offset) + .limit(limit) + ): + drivername = (sqlalchemy_uri or "").split("://")[0] + updates = {} + updated = False + if is_physical and drivername and expression: + quoted_expression = get_identifier_quoter(drivername)(expression) + if quoted_expression != expression: + updates["expression"] = quoted_expression + + # add schema name to `dataset.extra_json` so we don't have to join + # tables in order to use datasets + if schema: + try: + extra_json = json.loads(extra) if extra else {} + except json.decoder.JSONDecodeError: + extra_json = {} + extra_json["schema"] = schema + updates["extra_json"] = json.dumps(extra_json) + + if updates: + session.execute( + sa.update(NewDataset) + .where(NewDataset.id == dataset_id) + .values(**updates) + ) + updated = True + + if not is_physical and expression: + table_refrences = extract_table_references( + expression, get_dialect_name(drivername), show_warning=False + ) + found_tables = find_tables( + session, database_id, schema, table_refrences + ) + if found_tables: + op.bulk_insert( + dataset_table_association_table, + [ + {"dataset_id": dataset_id, "table_id": table.id} + for table in found_tables + ], + ) + updated = True + + if updated: + update_count += 1 + print_update_count() + + session.flush() + offset += limit + + if SHOW_PROGRESS: + print("") + + +def postprocess_columns(session: Session) -> None: + """ + At this step, we will + - Add engine specific quotes to `expression` of physical columns + - Tuck some extra metadata to `extra_json` + """ + total = session.query(NewColumn).count() + if not total: + return + + def get_joined_tables(offset, limit): + return ( + sa.join( + session.query(NewColumn) + .offset(offset) + .limit(limit) + .subquery("sl_columns"), + dataset_column_association_table, + dataset_column_association_table.c.column_id == NewColumn.id, + ) + .join( + NewDataset, + NewDataset.id == dataset_column_association_table.c.dataset_id, + ) + .join( + dataset_table_association_table, + # Join tables with physical datasets + and_( + NewDataset.is_physical, + dataset_table_association_table.c.dataset_id == NewDataset.id, + ), + isouter=True, + ) + .join(Database, Database.id == NewDataset.database_id) + .join( + TableColumn, + TableColumn.uuid == NewColumn.uuid, + isouter=True, + ) + .join( + SqlMetric, + SqlMetric.uuid == NewColumn.uuid, + isouter=True, + ) + ) + + offset = 0 + limit = 100000 + + print(f">> Run postprocessing on {total:,} columns") + + update_count = 0 + + def print_update_count(): + if SHOW_PROGRESS: + print( + f" Will update {update_count} columns" + " " * 20, + end="\r", + ) + + while offset < total: + query = ( + select( + # sorted alphabetically + [ + NewColumn.id.label("column_id"), + TableColumn.column_name, + NewColumn.changed_by_fk, + NewColumn.changed_on, + NewColumn.created_on, + NewColumn.description, + SqlMetric.d3format, + NewDataset.external_url, + NewColumn.extra_json, + NewColumn.is_dimensional, + NewColumn.is_filterable, + NewDataset.is_managed_externally, + NewColumn.is_physical, + SqlMetric.metric_type, + TableColumn.python_date_format, + Database.sqlalchemy_uri, + dataset_table_association_table.c.table_id, + 
+                    func.coalesce(
+                        TableColumn.verbose_name, SqlMetric.verbose_name
+                    ).label("verbose_name"),
+                    NewColumn.warning_text,
+                ]
+            )
+            .select_from(get_joined_tables(offset, limit))
+            .where(
+                # pre-filter to columns with potential updates
+                or_(
+                    NewColumn.is_physical,
+                    TableColumn.verbose_name.isnot(None),
+                    SqlMetric.verbose_name.isnot(None),
+                    SqlMetric.d3format.isnot(None),
+                    SqlMetric.metric_type.isnot(None),
+                )
+            )
+        )
+
+        start = offset + 1
+        end = min(total, offset + limit)
+        count = session.query(func.count()).select_from(query).scalar()
+        print(f"  [Column {start:,} to {end:,}] {count:,} may be updated")
+
+        physical_columns = []
+
+        for (
+            # sorted alphabetically
+            column_id,
+            column_name,
+            changed_by_fk,
+            changed_on,
+            created_on,
+            description,
+            d3format,
+            external_url,
+            extra_json,
+            is_dimensional,
+            is_filterable,
+            is_managed_externally,
+            is_physical,
+            metric_type,
+            python_date_format,
+            sqlalchemy_uri,
+            table_id,
+            verbose_name,
+            warning_text,
+        ) in session.execute(query):
+            try:
+                extra = json.loads(extra_json) if extra_json else {}
+            except json.decoder.JSONDecodeError:
+                extra = {}
+            updated_extra = {**extra}
+            updates = {}
+
+            if is_managed_externally:
+                updates["is_managed_externally"] = True
+            if external_url:
+                updates["external_url"] = external_url
+
+            # update extra json
+            for (key, val) in (
+                {
+                    "verbose_name": verbose_name,
+                    "python_date_format": python_date_format,
+                    "d3format": d3format,
+                    "metric_type": metric_type,
+                }
+            ).items():
+                # save the original val, including if it's `false`
+                if val is not None:
+                    updated_extra[key] = val
+
+            if updated_extra != extra:
+                updates["extra_json"] = json.dumps(updated_extra)
+
+            # update expression for physical table columns
+            if is_physical:
+                if column_name and sqlalchemy_uri:
+                    drivername = sqlalchemy_uri.split("://")[0]
+                    if drivername:
+                        quoted_expression = get_identifier_quoter(drivername)(
+                            column_name
+                        )
+                        if quoted_expression != column_name:
+                            updates["expression"] = quoted_expression
+                # duplicate physical columns for tables
+                physical_columns.append(
+                    dict(
+                        created_on=created_on,
+                        changed_on=changed_on,
+                        changed_by_fk=changed_by_fk,
+                        description=description,
+                        expression=updates.get("expression", column_name),
+                        external_url=external_url,
+                        extra_json=updates.get("extra_json", extra_json),
+                        is_aggregation=False,
+                        is_dimensional=is_dimensional,
+                        is_filterable=is_filterable,
+                        is_managed_externally=is_managed_externally,
+                        is_physical=True,
+                        name=column_name,
+                        table_id=table_id,
+                        warning_text=warning_text,
+                    )
+                )
+
+            if updates:
+                session.execute(
+                    sa.update(NewColumn)
+                    .where(NewColumn.id == column_id)
+                    .values(**updates)
+                )
+                update_count += 1
+                print_update_count()
+
+        if physical_columns:
+            op.bulk_insert(NewColumn.__table__, physical_columns)
+
+        session.flush()
+        offset += limit
+
+    if SHOW_PROGRESS:
+        print("")
+
+    print("  Assign table column relations...")
+    insert_from_select(
+        table_column_association_table,
+        select([NewColumn.table_id, NewColumn.id.label("column_id")])
+        .select_from(NewColumn)
+        .where(and_(NewColumn.is_physical, NewColumn.table_id.isnot(None))),
+    )
+
+
+new_tables: List[sa.Table] = [
+    NewTable.__table__,
+    NewDataset.__table__,
+    NewColumn.__table__,
+    table_column_association_table,
+    dataset_column_association_table,
+    dataset_table_association_table,
+    dataset_user_association_table,
+]
+
+
+def reset_postgres_id_sequence(table: str) -> None:
+    op.execute(
+        f"""
+        SELECT setval(
+            pg_get_serial_sequence('{table}', 'id'),
+            COALESCE(max(id) + 1, 1),
+            false
+        )
+        FROM {table};
+    """
+    )
+
+
+def upgrade() -> None:
+    bind = op.get_bind()
+    session: Session = Session(bind=bind)
+    Base.metadata.drop_all(bind=bind, tables=new_tables)
+    Base.metadata.create_all(bind=bind, tables=new_tables)
+
+    copy_tables(session)
+    copy_datasets(session)
+    copy_columns(session)
+    copy_metrics(session)
+    session.commit()
+
+    postprocess_columns(session)
+    session.commit()
+
+    postprocess_datasets(session)
+    session.commit()
+
+    # Tables were created with the same uuids as datasets. They should
+    # have different uuids, as they are different entities.
+    print(">> Assign new UUIDs to tables...")
+    assign_uuids(NewTable, session)
+
+    print(">> Drop intermediate columns...")
+    # These columns were only needed during the migration: once created,
+    # datasets are independent of tables, and dataset columns are likewise
+    # independent of table columns.
+    with op.batch_alter_table(NewTable.__tablename__) as batch_op:
+        batch_op.drop_column("sqlatable_id")
+    with op.batch_alter_table(NewColumn.__tablename__) as batch_op:
+        batch_op.drop_column("table_id")
+
+
+def downgrade():
+    Base.metadata.drop_all(bind=op.get_bind(), tables=new_tables)
diff --git a/superset/migrations/versions/ad07e4fdbaba_rm_time_range_endpoints_from_qc_3.py b/superset/migrations/versions/ad07e4fdbaba_rm_time_range_endpoints_from_qc_3.py
new file mode 100644
index 0000000000000..30efb1a083fc2
--- /dev/null
+++ b/superset/migrations/versions/ad07e4fdbaba_rm_time_range_endpoints_from_qc_3.py
@@ -0,0 +1,84 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""rm_time_range_endpoints_from_qc_3
+
+Revision ID: ad07e4fdbaba
+Revises: cecc6bf46990
+Create Date: 2022-04-18 11:20:47.390901
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = "ad07e4fdbaba" +down_revision = "cecc6bf46990" + +import json + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.ext.declarative import declarative_base + +from superset import db + +Base = declarative_base() + + +class Slice(Base): + __tablename__ = "slices" + id = sa.Column(sa.Integer, primary_key=True) + query_context = sa.Column(sa.Text) + slice_name = sa.Column(sa.String(250)) + + +def upgrade_slice(slc: Slice): + try: + query_context = json.loads(slc.query_context) + except json.decoder.JSONDecodeError: + return + + query_context.get("form_data", {}).pop("time_range_endpoints", None) + + if query_context.get("queries"): + queries = query_context["queries"] + for query in queries: + query.get("extras", {}).pop("time_range_endpoints", None) + + slc.query_context = json.dumps(query_context) + + return slc + + +def upgrade(): + bind = op.get_bind() + session = db.Session(bind=bind) + slices_updated = 0 + for slc in ( + session.query(Slice) + .filter(Slice.query_context.like("%time_range_endpoints%")) + .all() + ): + updated_slice = upgrade_slice(slc) + if updated_slice: + slices_updated += 1 + + print(f"slices updated with no time_range_endpoints: {slices_updated}") + session.commit() + session.close() + + +def downgrade(): + pass diff --git a/superset/migrations/versions/b56500de1855_add_uuid_column_to_import_mixin.py b/superset/migrations/versions/b56500de1855_add_uuid_column_to_import_mixin.py index 747ec9fb4f77f..0872cf5b3bb5d 100644 --- a/superset/migrations/versions/b56500de1855_add_uuid_column_to_import_mixin.py +++ b/superset/migrations/versions/b56500de1855_add_uuid_column_to_import_mixin.py @@ -23,19 +23,17 @@ """ import json import os -import time from json.decoder import JSONDecodeError from uuid import uuid4 import sqlalchemy as sa from alembic import op -from sqlalchemy.dialects.mysql.base import MySQLDialect -from sqlalchemy.dialects.postgresql.base import PGDialect from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import load_only from sqlalchemy_utils import UUIDType from superset import db +from superset.migrations.shared.utils import assign_uuids from superset.utils import core as utils # revision identifiers, used by Alembic. @@ -78,47 +76,6 @@ class ImportMixin: default_batch_size = int(os.environ.get("BATCH_SIZE", 200)) -# Add uuids directly using built-in SQL uuid function -add_uuids_by_dialect = { - MySQLDialect: """UPDATE %s SET uuid = UNHEX(REPLACE(CONVERT(UUID() using utf8mb4), '-', ''));""", - PGDialect: """UPDATE %s SET uuid = uuid_in(md5(random()::text || clock_timestamp()::text)::cstring);""", -} - - -def add_uuids(model, table_name, session, batch_size=default_batch_size): - """Populate columns with pre-computed uuids""" - bind = op.get_bind() - objects_query = session.query(model) - count = objects_query.count() - - # silently skip if the table is empty (suitable for db initialization) - if count == 0: - return - - print(f"\nAdding uuids for `{table_name}`...") - start_time = time.time() - - # Use dialect specific native SQL queries if possible - for dialect, sql in add_uuids_by_dialect.items(): - if isinstance(bind.dialect, dialect): - op.execute(sql % table_name) - print(f"Done. 
Assigned {count} uuids in {time.time() - start_time:.3f}s.") - return - - # Othwewise Use Python uuid function - start = 0 - while start < count: - end = min(start + batch_size, count) - for obj, uuid in map(lambda obj: (obj, uuid4()), objects_query[start:end]): - obj.uuid = uuid - session.merge(obj) - session.commit() - if start + batch_size < count: - print(f" uuid assigned to {end} out of {count}\r", end="") - start += batch_size - - print(f"Done. Assigned {count} uuids in {time.time() - start_time:.3f}s.") - def update_position_json(dashboard, session, uuid_map): try: @@ -178,7 +135,7 @@ def upgrade(): ), ) - add_uuids(model, table_name, session) + assign_uuids(model, session) # add uniqueness constraint with op.batch_alter_table(table_name) as batch_op: @@ -203,7 +160,7 @@ def downgrade(): update_dashboards(session, {}) # remove uuid column - for table_name, model in models.items(): + for table_name in models: with op.batch_alter_table(table_name) as batch_op: batch_op.drop_constraint(f"uq_{table_name}_uuid", type_="unique") batch_op.drop_column("uuid") diff --git a/superset/migrations/versions/b8d3a24d9131_new_dataset_models.py b/superset/migrations/versions/b8d3a24d9131_new_dataset_models.py index 8728e9adb7b8d..e69d1606e3e71 100644 --- a/superset/migrations/versions/b8d3a24d9131_new_dataset_models.py +++ b/superset/migrations/versions/b8d3a24d9131_new_dataset_models.py @@ -23,619 +23,23 @@ Create Date: 2021-11-11 16:41:53.266965 """ - -import json -from datetime import date, datetime, time, timedelta -from typing import Callable, List, Optional, Set -from uuid import uuid4 - -import sqlalchemy as sa -from alembic import op -from sqlalchemy import and_, inspect, or_ -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.orm import backref, relationship, Session -from sqlalchemy.schema import UniqueConstraint -from sqlalchemy.sql.type_api import TypeEngine -from sqlalchemy_utils import UUIDType - -from superset import app, db -from superset.connectors.sqla.models import ADDITIVE_METRIC_TYPES -from superset.databases.utils import make_url_safe -from superset.extensions import encrypted_field_factory -from superset.migrations.shared.utils import extract_table_references -from superset.models.core import Database as OriginalDatabase -from superset.sql_parse import Table - # revision identifiers, used by Alembic. 
revision = "b8d3a24d9131" down_revision = "5afbb1a5849b" -Base = declarative_base() -custom_password_store = app.config["SQLALCHEMY_CUSTOM_PASSWORD_STORE"] -DB_CONNECTION_MUTATOR = app.config["DB_CONNECTION_MUTATOR"] - - -class Database(Base): - - __tablename__ = "dbs" - __table_args__ = (UniqueConstraint("database_name"),) - - id = sa.Column(sa.Integer, primary_key=True) - database_name = sa.Column(sa.String(250), unique=True, nullable=False) - sqlalchemy_uri = sa.Column(sa.String(1024), nullable=False) - password = sa.Column(encrypted_field_factory.create(sa.String(1024))) - impersonate_user = sa.Column(sa.Boolean, default=False) - encrypted_extra = sa.Column(encrypted_field_factory.create(sa.Text), nullable=True) - extra = sa.Column( - sa.Text, - default=json.dumps( - dict( - metadata_params={}, - engine_params={}, - metadata_cache_timeout={}, - schemas_allowed_for_file_upload=[], - ) - ), - ) - server_cert = sa.Column(encrypted_field_factory.create(sa.Text), nullable=True) - - -class TableColumn(Base): - - __tablename__ = "table_columns" - __table_args__ = (UniqueConstraint("table_id", "column_name"),) - - id = sa.Column(sa.Integer, primary_key=True) - table_id = sa.Column(sa.Integer, sa.ForeignKey("tables.id")) - is_active = sa.Column(sa.Boolean, default=True) - extra = sa.Column(sa.Text) - column_name = sa.Column(sa.String(255), nullable=False) - type = sa.Column(sa.String(32)) - expression = sa.Column(sa.Text) - description = sa.Column(sa.Text) - is_dttm = sa.Column(sa.Boolean, default=False) - filterable = sa.Column(sa.Boolean, default=True) - groupby = sa.Column(sa.Boolean, default=True) - verbose_name = sa.Column(sa.String(1024)) - python_date_format = sa.Column(sa.String(255)) - - -class SqlMetric(Base): - - __tablename__ = "sql_metrics" - __table_args__ = (UniqueConstraint("table_id", "metric_name"),) - - id = sa.Column(sa.Integer, primary_key=True) - table_id = sa.Column(sa.Integer, sa.ForeignKey("tables.id")) - extra = sa.Column(sa.Text) - metric_type = sa.Column(sa.String(32)) - metric_name = sa.Column(sa.String(255), nullable=False) - expression = sa.Column(sa.Text, nullable=False) - warning_text = sa.Column(sa.Text) - description = sa.Column(sa.Text) - d3format = sa.Column(sa.String(128)) - verbose_name = sa.Column(sa.String(1024)) - - -class SqlaTable(Base): - - __tablename__ = "tables" - __table_args__ = (UniqueConstraint("database_id", "schema", "table_name"),) - - def fetch_columns_and_metrics(self, session: Session) -> None: - self.columns = session.query(TableColumn).filter( - TableColumn.table_id == self.id - ) - self.metrics = session.query(SqlMetric).filter(TableColumn.table_id == self.id) - - id = sa.Column(sa.Integer, primary_key=True) - columns: List[TableColumn] = [] - column_class = TableColumn - metrics: List[SqlMetric] = [] - metric_class = SqlMetric - - database_id = sa.Column(sa.Integer, sa.ForeignKey("dbs.id"), nullable=False) - database: Database = relationship( - "Database", - backref=backref("tables", cascade="all, delete-orphan"), - foreign_keys=[database_id], - ) - schema = sa.Column(sa.String(255)) - table_name = sa.Column(sa.String(250), nullable=False) - sql = sa.Column(sa.Text) - is_managed_externally = sa.Column(sa.Boolean, nullable=False, default=False) - external_url = sa.Column(sa.Text, nullable=True) - - -table_column_association_table = sa.Table( - "sl_table_columns", - Base.metadata, - sa.Column("table_id", sa.ForeignKey("sl_tables.id")), - sa.Column("column_id", sa.ForeignKey("sl_columns.id")), -) - -dataset_column_association_table = 
sa.Table( - "sl_dataset_columns", - Base.metadata, - sa.Column("dataset_id", sa.ForeignKey("sl_datasets.id")), - sa.Column("column_id", sa.ForeignKey("sl_columns.id")), -) - -dataset_table_association_table = sa.Table( - "sl_dataset_tables", - Base.metadata, - sa.Column("dataset_id", sa.ForeignKey("sl_datasets.id")), - sa.Column("table_id", sa.ForeignKey("sl_tables.id")), -) - - -class NewColumn(Base): - - __tablename__ = "sl_columns" - - id = sa.Column(sa.Integer, primary_key=True) - name = sa.Column(sa.Text) - type = sa.Column(sa.Text) - expression = sa.Column(sa.Text) - is_physical = sa.Column(sa.Boolean, default=True) - description = sa.Column(sa.Text) - warning_text = sa.Column(sa.Text) - is_temporal = sa.Column(sa.Boolean, default=False) - is_aggregation = sa.Column(sa.Boolean, default=False) - is_additive = sa.Column(sa.Boolean, default=False) - is_spatial = sa.Column(sa.Boolean, default=False) - is_partition = sa.Column(sa.Boolean, default=False) - is_increase_desired = sa.Column(sa.Boolean, default=True) - is_managed_externally = sa.Column(sa.Boolean, nullable=False, default=False) - external_url = sa.Column(sa.Text, nullable=True) - extra_json = sa.Column(sa.Text, default="{}") - - -class NewTable(Base): - - __tablename__ = "sl_tables" - __table_args__ = (UniqueConstraint("database_id", "catalog", "schema", "name"),) - - id = sa.Column(sa.Integer, primary_key=True) - name = sa.Column(sa.Text) - schema = sa.Column(sa.Text) - catalog = sa.Column(sa.Text) - database_id = sa.Column(sa.Integer, sa.ForeignKey("dbs.id"), nullable=False) - database: Database = relationship( - "Database", - backref=backref("new_tables", cascade="all, delete-orphan"), - foreign_keys=[database_id], - ) - columns: List[NewColumn] = relationship( - "NewColumn", secondary=table_column_association_table, cascade="all, delete" - ) - is_managed_externally = sa.Column(sa.Boolean, nullable=False, default=False) - external_url = sa.Column(sa.Text, nullable=True) - - -class NewDataset(Base): - __tablename__ = "sl_datasets" - - id = sa.Column(sa.Integer, primary_key=True) - sqlatable_id = sa.Column(sa.Integer, nullable=True, unique=True) - name = sa.Column(sa.Text) - expression = sa.Column(sa.Text) - tables: List[NewTable] = relationship( - "NewTable", secondary=dataset_table_association_table - ) - columns: List[NewColumn] = relationship( - "NewColumn", secondary=dataset_column_association_table, cascade="all, delete" - ) - is_physical = sa.Column(sa.Boolean, default=False) - is_managed_externally = sa.Column(sa.Boolean, nullable=False, default=False) - external_url = sa.Column(sa.Text, nullable=True) - - -TEMPORAL_TYPES = {date, datetime, time, timedelta} - - -def is_column_type_temporal(column_type: TypeEngine) -> bool: - try: - return column_type.python_type in TEMPORAL_TYPES - except NotImplementedError: - return False - - -def load_or_create_tables( - session: Session, - database_id: int, - default_schema: Optional[str], - tables: Set[Table], - conditional_quote: Callable[[str], str], -) -> List[NewTable]: - """ - Load or create new table model instances. 
- """ - if not tables: - return [] - - # set the default schema in tables that don't have it - if default_schema: - tables = list(tables) - for i, table in enumerate(tables): - if table.schema is None: - tables[i] = Table(table.table, default_schema, table.catalog) - - # load existing tables - predicate = or_( - *[ - and_( - NewTable.database_id == database_id, - NewTable.schema == table.schema, - NewTable.name == table.table, - ) - for table in tables - ] - ) - new_tables = session.query(NewTable).filter(predicate).all() - - # use original database model to get the engine - engine = ( - session.query(OriginalDatabase) - .filter_by(id=database_id) - .one() - .get_sqla_engine(default_schema) - ) - inspector = inspect(engine) - - # add missing tables - existing = {(table.schema, table.name) for table in new_tables} - for table in tables: - if (table.schema, table.table) not in existing: - column_metadata = inspector.get_columns(table.table, schema=table.schema) - columns = [ - NewColumn( - name=column["name"], - type=str(column["type"]), - expression=conditional_quote(column["name"]), - is_temporal=is_column_type_temporal(column["type"]), - is_aggregation=False, - is_physical=True, - is_spatial=False, - is_partition=False, - is_increase_desired=True, - ) - for column in column_metadata - ] - new_tables.append( - NewTable( - name=table.table, - schema=table.schema, - catalog=None, - database_id=database_id, - columns=columns, - ) - ) - existing.add((table.schema, table.table)) - - return new_tables - - -def after_insert(target: SqlaTable) -> None: # pylint: disable=too-many-locals - """ - Copy old datasets to the new models. - """ - session = inspect(target).session - - # get DB-specific conditional quoter for expressions that point to columns or - # table names - database = ( - target.database - or session.query(Database).filter_by(id=target.database_id).first() - ) - if not database: - return - url = make_url_safe(database.sqlalchemy_uri) - dialect_class = url.get_dialect() - conditional_quote = dialect_class().identifier_preparer.quote - - # create columns - columns = [] - for column in target.columns: - # ``is_active`` might be ``None`` at this point, but it defaults to ``True``. 
- if column.is_active is False: - continue - - try: - extra_json = json.loads(column.extra or "{}") - except json.decoder.JSONDecodeError: - extra_json = {} - for attr in {"groupby", "filterable", "verbose_name", "python_date_format"}: - value = getattr(column, attr) - if value: - extra_json[attr] = value - - columns.append( - NewColumn( - name=column.column_name, - type=column.type or "Unknown", - expression=column.expression or conditional_quote(column.column_name), - description=column.description, - is_temporal=column.is_dttm, - is_aggregation=False, - is_physical=column.expression is None or column.expression == "", - is_spatial=False, - is_partition=False, - is_increase_desired=True, - extra_json=json.dumps(extra_json) if extra_json else None, - is_managed_externally=target.is_managed_externally, - external_url=target.external_url, - ), - ) - - # create metrics - for metric in target.metrics: - try: - extra_json = json.loads(metric.extra or "{}") - except json.decoder.JSONDecodeError: - extra_json = {} - for attr in {"verbose_name", "metric_type", "d3format"}: - value = getattr(metric, attr) - if value: - extra_json[attr] = value - - is_additive = ( - metric.metric_type and metric.metric_type.lower() in ADDITIVE_METRIC_TYPES - ) - - columns.append( - NewColumn( - name=metric.metric_name, - type="Unknown", # figuring this out would require a type inferrer - expression=metric.expression, - warning_text=metric.warning_text, - description=metric.description, - is_aggregation=True, - is_additive=is_additive, - is_physical=False, - is_spatial=False, - is_partition=False, - is_increase_desired=True, - extra_json=json.dumps(extra_json) if extra_json else None, - is_managed_externally=target.is_managed_externally, - external_url=target.external_url, - ), - ) - - # physical dataset - if not target.sql: - physical_columns = [column for column in columns if column.is_physical] - - # create table - table = NewTable( - name=target.table_name, - schema=target.schema, - catalog=None, # currently not supported - database_id=target.database_id, - columns=physical_columns, - is_managed_externally=target.is_managed_externally, - external_url=target.external_url, - ) - tables = [table] - - # virtual dataset - else: - # mark all columns as virtual (not physical) - for column in columns: - column.is_physical = False - - # find referenced tables - referenced_tables = extract_table_references(target.sql, dialect_class.name) - tables = load_or_create_tables( - session, - target.database_id, - target.schema, - referenced_tables, - conditional_quote, - ) - - # create the new dataset - dataset = NewDataset( - sqlatable_id=target.id, - name=target.table_name, - expression=target.sql or conditional_quote(target.table_name), - tables=tables, - columns=columns, - is_physical=not target.sql, - is_managed_externally=target.is_managed_externally, - external_url=target.external_url, - ) - session.add(dataset) - - -def upgrade(): - # Create tables for the new models. 
- op.create_table( - "sl_columns", - # AuditMixinNullable - sa.Column("created_on", sa.DateTime(), nullable=True), - sa.Column("changed_on", sa.DateTime(), nullable=True), - sa.Column("created_by_fk", sa.Integer(), nullable=True), - sa.Column("changed_by_fk", sa.Integer(), nullable=True), - # ExtraJSONMixin - sa.Column("extra_json", sa.Text(), nullable=True), - # ImportExportMixin - sa.Column("uuid", UUIDType(binary=True), primary_key=False, default=uuid4), - # Column - sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column("name", sa.TEXT(), nullable=False), - sa.Column("type", sa.TEXT(), nullable=False), - sa.Column("expression", sa.TEXT(), nullable=False), - sa.Column( - "is_physical", - sa.BOOLEAN(), - nullable=False, - default=True, - ), - sa.Column("description", sa.TEXT(), nullable=True), - sa.Column("warning_text", sa.TEXT(), nullable=True), - sa.Column("unit", sa.TEXT(), nullable=True), - sa.Column("is_temporal", sa.BOOLEAN(), nullable=False), - sa.Column( - "is_spatial", - sa.BOOLEAN(), - nullable=False, - default=False, - ), - sa.Column( - "is_partition", - sa.BOOLEAN(), - nullable=False, - default=False, - ), - sa.Column( - "is_aggregation", - sa.BOOLEAN(), - nullable=False, - default=False, - ), - sa.Column( - "is_additive", - sa.BOOLEAN(), - nullable=False, - default=False, - ), - sa.Column( - "is_increase_desired", - sa.BOOLEAN(), - nullable=False, - default=True, - ), - sa.Column( - "is_managed_externally", - sa.Boolean(), - nullable=False, - server_default=sa.false(), - ), - sa.Column("external_url", sa.Text(), nullable=True), - sa.PrimaryKeyConstraint("id"), - ) - with op.batch_alter_table("sl_columns") as batch_op: - batch_op.create_unique_constraint("uq_sl_columns_uuid", ["uuid"]) - - op.create_table( - "sl_tables", - # AuditMixinNullable - sa.Column("created_on", sa.DateTime(), nullable=True), - sa.Column("changed_on", sa.DateTime(), nullable=True), - sa.Column("created_by_fk", sa.Integer(), nullable=True), - sa.Column("changed_by_fk", sa.Integer(), nullable=True), - # ExtraJSONMixin - sa.Column("extra_json", sa.Text(), nullable=True), - # ImportExportMixin - sa.Column("uuid", UUIDType(binary=True), primary_key=False, default=uuid4), - # Table - sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column("database_id", sa.INTEGER(), autoincrement=False, nullable=False), - sa.Column("catalog", sa.TEXT(), nullable=True), - sa.Column("schema", sa.TEXT(), nullable=True), - sa.Column("name", sa.TEXT(), nullable=False), - sa.Column( - "is_managed_externally", - sa.Boolean(), - nullable=False, - server_default=sa.false(), - ), - sa.Column("external_url", sa.Text(), nullable=True), - sa.ForeignKeyConstraint(["database_id"], ["dbs.id"], name="sl_tables_ibfk_1"), - sa.PrimaryKeyConstraint("id"), - ) - with op.batch_alter_table("sl_tables") as batch_op: - batch_op.create_unique_constraint("uq_sl_tables_uuid", ["uuid"]) - - op.create_table( - "sl_table_columns", - sa.Column("table_id", sa.INTEGER(), autoincrement=False, nullable=False), - sa.Column("column_id", sa.INTEGER(), autoincrement=False, nullable=False), - sa.ForeignKeyConstraint( - ["column_id"], ["sl_columns.id"], name="sl_table_columns_ibfk_2" - ), - sa.ForeignKeyConstraint( - ["table_id"], ["sl_tables.id"], name="sl_table_columns_ibfk_1" - ), - ) - - op.create_table( - "sl_datasets", - # AuditMixinNullable - sa.Column("created_on", sa.DateTime(), nullable=True), - sa.Column("changed_on", sa.DateTime(), nullable=True), - sa.Column("created_by_fk", sa.Integer(), nullable=True), 
- sa.Column("changed_by_fk", sa.Integer(), nullable=True), - # ExtraJSONMixin - sa.Column("extra_json", sa.Text(), nullable=True), - # ImportExportMixin - sa.Column("uuid", UUIDType(binary=True), primary_key=False, default=uuid4), - # Dataset - sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column("sqlatable_id", sa.INTEGER(), nullable=True), - sa.Column("name", sa.TEXT(), nullable=False), - sa.Column("expression", sa.TEXT(), nullable=False), - sa.Column( - "is_physical", - sa.BOOLEAN(), - nullable=False, - default=False, - ), - sa.Column( - "is_managed_externally", - sa.Boolean(), - nullable=False, - server_default=sa.false(), - ), - sa.Column("external_url", sa.Text(), nullable=True), - sa.PrimaryKeyConstraint("id"), - ) - with op.batch_alter_table("sl_datasets") as batch_op: - batch_op.create_unique_constraint("uq_sl_datasets_uuid", ["uuid"]) - batch_op.create_unique_constraint( - "uq_sl_datasets_sqlatable_id", ["sqlatable_id"] - ) - - op.create_table( - "sl_dataset_columns", - sa.Column("dataset_id", sa.INTEGER(), autoincrement=False, nullable=False), - sa.Column("column_id", sa.INTEGER(), autoincrement=False, nullable=False), - sa.ForeignKeyConstraint( - ["column_id"], ["sl_columns.id"], name="sl_dataset_columns_ibfk_2" - ), - sa.ForeignKeyConstraint( - ["dataset_id"], ["sl_datasets.id"], name="sl_dataset_columns_ibfk_1" - ), - ) - - op.create_table( - "sl_dataset_tables", - sa.Column("dataset_id", sa.INTEGER(), autoincrement=False, nullable=False), - sa.Column("table_id", sa.INTEGER(), autoincrement=False, nullable=False), - sa.ForeignKeyConstraint( - ["dataset_id"], ["sl_datasets.id"], name="sl_dataset_tables_ibfk_1" - ), - sa.ForeignKeyConstraint( - ["table_id"], ["sl_tables.id"], name="sl_dataset_tables_ibfk_2" - ), - ) +# ===================== Notice ======================== +# +# Migrations made in this revision has been moved to `new_dataset_models_take_2` +# to fix performance issues as well as a couple of shortcomings in the original +# design. 
+# +# ====================================================== - # migrate existing datasets to the new models - bind = op.get_bind() - session = db.Session(bind=bind) # pylint: disable=no-member - datasets = session.query(SqlaTable).all() - for dataset in datasets: - dataset.fetch_columns_and_metrics(session) - after_insert(target=dataset) +def upgrade() -> None: + pass def downgrade(): - op.drop_table("sl_dataset_columns") - op.drop_table("sl_dataset_tables") - op.drop_table("sl_datasets") - op.drop_table("sl_table_columns") - op.drop_table("sl_tables") - op.drop_table("sl_columns") + pass diff --git a/superset/migrations/versions/c501b7c653a3_add_missing_uuid_column.py b/superset/migrations/versions/c501b7c653a3_add_missing_uuid_column.py index 4cfbc104c01db..786b41a1c72b8 100644 --- a/superset/migrations/versions/c501b7c653a3_add_missing_uuid_column.py +++ b/superset/migrations/versions/c501b7c653a3_add_missing_uuid_column.py @@ -38,7 +38,7 @@ from superset import db from superset.migrations.versions.b56500de1855_add_uuid_column_to_import_mixin import ( - add_uuids, + assign_uuids, models, update_dashboards, ) @@ -73,7 +73,7 @@ def upgrade(): default=uuid4, ), ) - add_uuids(model, table_name, session) + assign_uuids(model, session) # add uniqueness constraint with op.batch_alter_table(table_name) as batch_op: diff --git a/superset/migrations/versions/cecc6bf46990_rm_time_range_endpoints_2.py b/superset/migrations/versions/cecc6bf46990_rm_time_range_endpoints_2.py new file mode 100644 index 0000000000000..bd2532e88a1c2 --- /dev/null +++ b/superset/migrations/versions/cecc6bf46990_rm_time_range_endpoints_2.py @@ -0,0 +1,35 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""rm_time_range_endpoints_2 + +Revision ID: cecc6bf46990 +Revises: 9d8a8d575284 +Create Date: 2022-04-14 17:21:53.996022 + +""" + +# revision identifiers, used by Alembic. 
+revision = "cecc6bf46990" +down_revision = "9d8a8d575284" + + +def upgrade(): + pass + + +def downgrade(): + pass diff --git a/superset/migrations/versions/f1410ed7ec95_migrate_native_filters_to_new_schema.py b/superset/migrations/versions/f1410ed7ec95_migrate_native_filters_to_new_schema.py index 630a7b1062ac6..46b8e5f958670 100644 --- a/superset/migrations/versions/f1410ed7ec95_migrate_native_filters_to_new_schema.py +++ b/superset/migrations/versions/f1410ed7ec95_migrate_native_filters_to_new_schema.py @@ -71,7 +71,7 @@ def downgrade_filters(native_filters: Iterable[Dict[str, Any]]) -> int: filter_state = default_data_mask.get("filterState") if filter_state is not None: changed_filters += 1 - value = filter_state["value"] + value = filter_state.get("value") native_filter["defaultValue"] = value return changed_filters diff --git a/superset/models/core.py b/superset/models/core.py index daa0fb9a7ddfc..c2052749ad8a0 100755 --- a/superset/models/core.py +++ b/superset/models/core.py @@ -408,12 +408,14 @@ def get_sqla_engine( except Exception as ex: raise self.db_engine_spec.get_dbapi_mapped_exception(ex) + @property + def quote_identifier(self) -> Callable[[str], str]: + """Add quotes to potential identifiter expressions if needed""" + return self.get_dialect().identifier_preparer.quote + def get_reserved_words(self) -> Set[str]: return self.get_dialect().preparer.reserved_words - def get_quoter(self) -> Callable[[str, Any], str]: - return self.get_dialect().identifier_preparer.quote - def get_df( # pylint: disable=too-many-locals self, sql: str, diff --git a/superset/models/helpers.py b/superset/models/helpers.py index baa0566c01119..3b4e99159f0b8 100644 --- a/superset/models/helpers.py +++ b/superset/models/helpers.py @@ -477,7 +477,7 @@ class ExtraJSONMixin: @property def extra(self) -> Dict[str, Any]: try: - return json.loads(self.extra_json) + return json.loads(self.extra_json) if self.extra_json else {} except (TypeError, JSONDecodeError) as exc: logger.error( "Unable to load an extra json: %r. Leaving empty.", exc, exc_info=True @@ -522,18 +522,23 @@ def warning_markdown(self) -> Optional[str]: def clone_model( - target: Model, ignore: Optional[List[str]] = None, **kwargs: Any + target: Model, + ignore: Optional[List[str]] = None, + keep_relations: Optional[List[str]] = None, + **kwargs: Any, ) -> Model: """ - Clone a SQLAlchemy model. + Clone a SQLAlchemy model. By default will only clone naive column attributes. + To include relationship attributes, use `keep_relations`. """ ignore = ignore or [] table = target.__table__ + primary_keys = table.primary_key.columns.keys() data = { attr: getattr(target, attr) - for attr in table.columns.keys() - if attr not in table.primary_key.columns.keys() and attr not in ignore + for attr in list(table.columns.keys()) + (keep_relations or []) + if attr not in primary_keys and attr not in ignore } data.update(kwargs) diff --git a/superset/models/sql_types/presto_sql_types.py b/superset/models/sql_types/presto_sql_types.py index a314639ca6907..5f36266ccaa4f 100644 --- a/superset/models/sql_types/presto_sql_types.py +++ b/superset/models/sql_types/presto_sql_types.py @@ -14,11 +14,15 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
+ +# pylint: disable=abstract-method from typing import Any, Dict, List, Optional, Type -from sqlalchemy.sql.sqltypes import Integer +from sqlalchemy.engine.interfaces import Dialect +from sqlalchemy.sql.sqltypes import DATE, Integer, TIMESTAMP from sqlalchemy.sql.type_api import TypeEngine from sqlalchemy.sql.visitors import Visitable +from sqlalchemy.types import TypeDecorator # _compiler_dispatch is defined to help with type compilation @@ -91,3 +95,35 @@ def python_type(self) -> Optional[Type[Any]]: @classmethod def _compiler_dispatch(cls, _visitor: Visitable, **_kw: Any) -> str: return "ROW" + + +class TimeStamp(TypeDecorator): + """ + A type to extend functionality of timestamp data type. + """ + + impl = TIMESTAMP + + @classmethod + def process_bind_param(cls, value: str, dialect: Dialect) -> str: + """ + Used for in-line rendering of TIMESTAMP data type + as Presto does not support automatic casting. + """ + return f"TIMESTAMP '{value}'" + + +class Date(TypeDecorator): + """ + A type to extend functionality of date data type. + """ + + impl = DATE + + @classmethod + def process_bind_param(cls, value: str, dialect: Dialect) -> str: + """ + Used for in-line rendering of DATE data type + as Presto does not support automatic casting. + """ + return f"DATE '{value}'" diff --git a/superset/reports/api.py b/superset/reports/api.py index e0d2598249d66..2871125c9a322 100644 --- a/superset/reports/api.py +++ b/superset/reports/api.py @@ -189,6 +189,7 @@ def ensure_alert_reports_enabled(self) -> Optional[Response]: "name", "active", "created_by", + "owners", "type", "last_state", "creation_method", @@ -212,6 +213,7 @@ def ensure_alert_reports_enabled(self) -> Optional[Response]: "chart": "slice_name", "database": "database_name", "created_by": RelatedFieldFilter("first_name", FilterRelatedOwners), + "owners": RelatedFieldFilter("first_name", FilterRelatedOwners), } apispec_parameter_schemas = { diff --git a/superset/sql_lab.py b/superset/sql_lab.py index d3e08de92a9cf..567ff0d13d592 100644 --- a/superset/sql_lab.py +++ b/superset/sql_lab.py @@ -186,7 +186,7 @@ def execute_sql_statement( # pylint: disable=too-many-arguments,too-many-locals apply_ctas: bool = False, ) -> SupersetResultSet: """Executes a single SQL statement""" - database = query.database + database: Database = query.database db_engine_spec = database.db_engine_spec parsed_query = ParsedQuery(sql_statement) sql = parsed_query.stripped() diff --git a/superset/sql_parse.py b/superset/sql_parse.py index e3b2e7c196834..d377986f56573 100644 --- a/superset/sql_parse.py +++ b/superset/sql_parse.py @@ -18,7 +18,7 @@ import re from dataclasses import dataclass from enum import Enum -from typing import cast, List, Optional, Set, Tuple +from typing import Any, cast, Iterator, List, Optional, Set, Tuple from urllib import parse import sqlparse @@ -47,10 +47,16 @@ from superset.exceptions import QueryClauseValidationException +try: + from sqloxide import parse_sql as sqloxide_parse +except: # pylint: disable=bare-except + sqloxide_parse = None + RESULT_OPERATIONS = {"UNION", "INTERSECT", "EXCEPT", "SELECT"} ON_KEYWORD = "ON" PRECEDES_TABLE_NAME = {"FROM", "JOIN", "DESCRIBE", "WITH", "LEFT JOIN", "RIGHT JOIN"} CTE_PREFIX = "CTE__" + logger = logging.getLogger(__name__) @@ -176,6 +182,9 @@ def __str__(self) -> str: if part ) + def __eq__(self, __o: object) -> bool: + return str(self) == str(__o) + class ParsedQuery: def __init__(self, sql_statement: str, strip_comments: bool = False): @@ -698,3 +707,75 @@ def insert_rls( ) return token_list + 
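+# A usage sketch for ``extract_table_references`` (defined below, after the
+# dialect mapping); the statement and dialect name here are illustrative only.
+# When sqloxide is importable, the statement is parsed with the mapped dialect;
+# otherwise the function falls back to ``ParsedQuery``:
+#
+#     extract_table_references("SELECT 1 FROM a.b", "postgresql")
+#     # -> {Table(table="b", schema="a")}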
+ +# mapping between sqloxide and SQLAlchemy dialects +SQLOXITE_DIALECTS = { + "ansi": {"trino", "trinonative", "presto"}, + "hive": {"hive", "databricks"}, + "ms": {"mssql"}, + "mysql": {"mysql"}, + "postgres": { + "cockroachdb", + "hana", + "netezza", + "postgres", + "postgresql", + "redshift", + "vertica", + }, + "snowflake": {"snowflake"}, + "sqlite": {"sqlite", "gsheets", "shillelagh"}, + "clickhouse": {"clickhouse"}, +} + +RE_JINJA_VAR = re.compile(r"\{\{[^\{\}]+\}\}") +RE_JINJA_BLOCK = re.compile(r"\{[%#][^\{\}%#]+[%#]\}") + + +def extract_table_references( + sql_text: str, sqla_dialect: str, show_warning: bool = True +) -> Set["Table"]: + """ + Return all the dependencies from a SQL sql_text. + """ + dialect = "generic" + tree = None + + if sqloxide_parse: + for dialect, sqla_dialects in SQLOXITE_DIALECTS.items(): + if sqla_dialect in sqla_dialects: + break + sql_text = RE_JINJA_BLOCK.sub(" ", sql_text) + sql_text = RE_JINJA_VAR.sub("abc", sql_text) + try: + tree = sqloxide_parse(sql_text, dialect=dialect) + except Exception as ex: # pylint: disable=broad-except + if show_warning: + logger.warning( + "\nUnable to parse query with sqloxide:\n%s\n%s", sql_text, ex + ) + + # fallback to sqlparse + if not tree: + parsed = ParsedQuery(sql_text) + return parsed.tables + + def find_nodes_by_key(element: Any, target: str) -> Iterator[Any]: + """ + Find all nodes in a SQL tree matching a given key. + """ + if isinstance(element, list): + for child in element: + yield from find_nodes_by_key(child, target) + elif isinstance(element, dict): + for key, value in element.items(): + if key == target: + yield value + else: + yield from find_nodes_by_key(value, target) + + return { + Table(*[part["value"] for part in table["name"][::-1]]) + for table in find_nodes_by_key(tree, "Table") + } diff --git a/superset/tables/models.py b/superset/tables/models.py index e2489445c686b..9a0c07fdcf5a4 100644 --- a/superset/tables/models.py +++ b/superset/tables/models.py @@ -24,26 +24,41 @@ These models are not fully implemented, and shouldn't be used yet. 
""" -from typing import List +from typing import Any, Dict, Iterable, List, Optional, TYPE_CHECKING import sqlalchemy as sa from flask_appbuilder import Model -from sqlalchemy.orm import backref, relationship +from sqlalchemy import inspect +from sqlalchemy.orm import backref, relationship, Session from sqlalchemy.schema import UniqueConstraint +from sqlalchemy.sql import and_, or_ from superset.columns.models import Column +from superset.connectors.sqla.utils import get_physical_table_metadata from superset.models.core import Database from superset.models.helpers import ( AuditMixinNullable, ExtraJSONMixin, ImportExportMixin, ) +from superset.sql_parse import Table as TableName -association_table = sa.Table( +if TYPE_CHECKING: + from superset.datasets.models import Dataset + +table_column_association_table = sa.Table( "sl_table_columns", Model.metadata, # pylint: disable=no-member - sa.Column("table_id", sa.ForeignKey("sl_tables.id")), - sa.Column("column_id", sa.ForeignKey("sl_columns.id")), + sa.Column( + "table_id", + sa.ForeignKey("sl_tables.id", ondelete="cascade"), + primary_key=True, + ), + sa.Column( + "column_id", + sa.ForeignKey("sl_columns.id", ondelete="cascade"), + primary_key=True, + ), ) @@ -61,7 +76,6 @@ class Table(Model, AuditMixinNullable, ExtraJSONMixin, ImportExportMixin): __table_args__ = (UniqueConstraint("database_id", "catalog", "schema", "name"),) id = sa.Column(sa.Integer, primary_key=True) - database_id = sa.Column(sa.Integer, sa.ForeignKey("dbs.id"), nullable=False) database: Database = relationship( "Database", @@ -70,6 +84,19 @@ class Table(Model, AuditMixinNullable, ExtraJSONMixin, ImportExportMixin): backref=backref("new_tables", cascade="all, delete-orphan"), foreign_keys=[database_id], ) + # The relationship between datasets and columns is 1:n, but we use a + # many-to-many association table to avoid adding two mutually exclusive + # columns(dataset_id and table_id) to Column + columns: List[Column] = relationship( + "Column", + secondary=table_column_association_table, + cascade="all, delete-orphan", + single_parent=True, + # backref is needed for session to skip detaching `dataset` if only `column` + # is loaded. + backref="tables", + ) + datasets: List["Dataset"] # will be populated by Dataset.tables backref # We use ``sa.Text`` for these attributes because (1) in modern databases the # performance is the same as ``VARCHAR``[1] and (2) because some table names can be @@ -80,13 +107,96 @@ class Table(Model, AuditMixinNullable, ExtraJSONMixin, ImportExportMixin): schema = sa.Column(sa.Text) name = sa.Column(sa.Text) - # The relationship between tables and columns is 1:n, but we use a many-to-many - # association to differentiate between the relationship between datasets and - # columns. - columns: List[Column] = relationship( - "Column", secondary=association_table, cascade="all, delete" - ) - # Column is managed externally and should be read-only inside Superset is_managed_externally = sa.Column(sa.Boolean, nullable=False, default=False) external_url = sa.Column(sa.Text, nullable=True) + + @property + def fullname(self) -> str: + return str(TableName(table=self.name, schema=self.schema, catalog=self.catalog)) + + def __repr__(self) -> str: + return f"" + + def sync_columns(self) -> None: + """Sync table columns with the database. 
+        Keep metadata for existing columns."""
+        try:
+            column_metadata = get_physical_table_metadata(
+                self.database, self.name, self.schema
+            )
+        except Exception:  # pylint: disable=broad-except
+            column_metadata = []
+
+        existing_columns = {column.name: column for column in self.columns}
+        quote_identifier = self.database.quote_identifier
+
+        def update_or_create_column(column_meta: Dict[str, Any]) -> Column:
+            column_name: str = column_meta["name"]
+            if column_name in existing_columns:
+                column = existing_columns[column_name]
+            else:
+                column = Column(name=column_name)
+            column.type = column_meta["type"]
+            column.is_temporal = column_meta["is_dttm"]
+            column.expression = quote_identifier(column_name)
+            column.is_aggregation = False
+            column.is_physical = True
+            column.is_spatial = False
+            column.is_partition = False  # TODO: update with accurate is_partition
+            return column
+
+        self.columns = [update_or_create_column(col) for col in column_metadata]
+
+    @staticmethod
+    def bulk_load_or_create(
+        database: Database,
+        table_names: Iterable[TableName],
+        default_schema: Optional[str] = None,
+        sync_columns: bool = False,
+        default_props: Optional[Dict[str, Any]] = None,
+    ) -> List["Table"]:
+        """
+        Load or create multiple Table instances.
+        """
+        if not table_names:
+            return []
+
+        if not database.id:
+            raise Exception("Database must already be saved to the metastore")
+
+        default_props = default_props or {}
+        session: Session = inspect(database).session
+        # load existing tables
+        predicate = or_(
+            *[
+                and_(
+                    Table.database_id == database.id,
+                    Table.schema == (table.schema or default_schema),
+                    Table.name == table.table,
+                )
+                for table in table_names
+            ]
+        )
+        all_tables = session.query(Table).filter(predicate).order_by(Table.id).all()
+
+        # add missing tables and pull their columns
+        existing = {(table.schema, table.name) for table in all_tables}
+        for table in table_names:
+            schema = table.schema or default_schema
+            name = table.table
+            if (schema, name) not in existing:
+                new_table = Table(
+                    database=database,
+                    database_id=database.id,
+                    name=name,
+                    schema=schema,
+                    catalog=None,
+                    **default_props,
+                )
+                if sync_columns:
+                    new_table.sync_columns()
+                all_tables.append(new_table)
+                existing.add((schema, name))
+                session.add(new_table)
+
+        return all_tables
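
A sketch of how the new bulk loader might be called when linking a dataset to
the physical tables referenced in its SQL (illustrative; assumes `database`
has already been persisted to the metastore, so it has an id):

    from superset.sql_parse import Table as TableName
    from superset.tables.models import Table

    tables = Table.bulk_load_or_create(
        database,
        [TableName(table="my_table", schema="my_schema")],
        default_schema="public",
        sync_columns=False,
        default_props={"is_managed_externally": False},
    )
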
diff --git a/superset/utils/csv.py b/superset/utils/csv.py
index 42d2c557832e9..0dc84ff36a3de 100644
--- a/superset/utils/csv.py
+++ b/superset/utils/csv.py
@@ -90,11 +90,16 @@ def get_chart_csv_data(
 def get_chart_dataframe(
     chart_url: str, auth_cookies: Optional[Dict[str, str]] = None
 ) -> Optional[pd.DataFrame]:
+    # Disable all the unnecessary-lambda violations in this function
+    # pylint: disable=unnecessary-lambda
     content = get_chart_csv_data(chart_url, auth_cookies)
     if content is None:
         return None

     result = simplejson.loads(content.decode("utf-8"))
+
+    # convert float values to strings so that long numbers are shown in full
+    pd.set_option("display.float_format", lambda x: str(x))
     df = pd.DataFrame.from_dict(result["result"][0]["data"])

     # rebuild hierarchical columns and index
diff --git a/superset/utils/pandas_postprocessing/__init__.py b/superset/utils/pandas_postprocessing/__init__.py
index 3d180bc372020..9755df984cc56 100644
--- a/superset/utils/pandas_postprocessing/__init__.py
+++ b/superset/utils/pandas_postprocessing/__init__.py
@@ -28,6 +28,7 @@
 )
 from superset.utils.pandas_postprocessing.pivot import pivot
 from superset.utils.pandas_postprocessing.prophet import prophet
+from superset.utils.pandas_postprocessing.rename import rename
 from superset.utils.pandas_postprocessing.resample import resample
 from superset.utils.pandas_postprocessing.rolling import rolling
 from superset.utils.pandas_postprocessing.select import select
@@ -46,6 +47,7 @@
     "geodetic_parse",
     "pivot",
     "prophet",
+    "rename",
     "resample",
     "rolling",
     "select",
diff --git a/superset/utils/pandas_postprocessing/flatten.py b/superset/utils/pandas_postprocessing/flatten.py
index 3d5a003bf1e5d..2874ac57970a4 100644
--- a/superset/utils/pandas_postprocessing/flatten.py
+++ b/superset/utils/pandas_postprocessing/flatten.py
@@ -81,14 +81,16 @@ def flatten(
     """
     if _is_multi_index_on_columns(df):
         df.columns = df.columns.droplevel(drop_levels)
-        # every cell should be converted to string
-        df.columns = [
-            FLAT_COLUMN_SEPARATOR.join(
-                # pylint: disable=superfluous-parens
-                [str(cell) for cell in (series if is_sequence(series) else [series])]
-            )
-            for series in df.columns.to_flat_index()
-        ]
+        _columns = []
+        for series in df.columns.to_flat_index():
+            _cells = []
+            for cell in series if is_sequence(series) else [series]:
+                # skip null levels; every remaining cell is converted to string
+                if pd.notnull(cell):
+                    _cells.append(str(cell))
+            _columns.append(FLAT_COLUMN_SEPARATOR.join(_cells))
+
+        df.columns = _columns

     if reset_index and not isinstance(df.index, pd.RangeIndex):
         df = df.reset_index(level=0)
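
A quick sketch of the new null-skipping behavior in ``flatten`` (illustrative;
``FLAT_COLUMN_SEPARATOR`` is assumed to be the ", " separator used by Superset):

    import pandas as pd
    from superset.utils.pandas_postprocessing import flatten

    df = pd.DataFrame([[1, 2]])
    df.columns = pd.MultiIndex.from_tuples([("sum", "a"), ("sum", None)])
    # previously the second column flattened to "sum, nan"; the null level
    # is now dropped, leaving just "sum"
    assert flatten(df).columns.tolist() == ["sum, a", "sum"]
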
diff --git a/superset/utils/pandas_postprocessing/rename.py b/superset/utils/pandas_postprocessing/rename.py
new file mode 100644
index 0000000000000..0e35a651a8073
--- /dev/null
+++ b/superset/utils/pandas_postprocessing/rename.py
@@ -0,0 +1,59 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from typing import Dict, Optional, Union
+
+import pandas as pd
+from flask_babel import gettext as _
+from pandas._typing import Level
+
+from superset.exceptions import InvalidPostProcessingError
+from superset.utils.pandas_postprocessing.utils import validate_column_args
+
+
+@validate_column_args("columns")
+def rename(
+    df: pd.DataFrame,
+    columns: Dict[str, Union[str, None]],
+    inplace: bool = False,
+    level: Optional[Level] = None,
+) -> pd.DataFrame:
+    """
+    Alter column names of a DataFrame
+
+    :param df: DataFrame to rename.
+    :param columns: A mapping of old column names to new column names.
+    :param inplace: Whether to rename the DataFrame in place.
+    :param level: In case of a MultiIndex, only rename labels in the specified level.
+    :return: DataFrame after renaming
+    :raises InvalidPostProcessingError: If the level is invalid or the new
+            labels already exist
+    """
+    if not columns:
+        return df
+
+    try:
+        _rename_level = df.columns.get_level_values(level=level)
+    except (IndexError, KeyError) as err:
+        raise InvalidPostProcessingError from err
+
+    if all(new_name in _rename_level for new_name in columns.values()):
+        raise InvalidPostProcessingError(_("Label already exists"))
+
+    if inplace:
+        df.rename(columns=columns, inplace=inplace, level=level)
+        return df
+    return df.rename(columns=columns, inplace=inplace, level=level)
diff --git a/superset/views/base.py b/superset/views/base.py
index 863ca2f84ab67..22e4c5f8d163b 100644
--- a/superset/views/base.py
+++ b/superset/views/base.py
@@ -45,7 +45,7 @@
 from flask_wtf.csrf import CSRFError
 from flask_wtf.form import FlaskForm
 from pkg_resources import resource_filename
-from sqlalchemy import or_
+from sqlalchemy import exc, or_
 from sqlalchemy.orm import Query
 from werkzeug.exceptions import HTTPException
 from wtforms import Form
@@ -231,6 +231,9 @@ def wraps(self: "BaseSupersetView", *args: Any, **kwargs: Any) -> FlaskResponse:
             return json_error_response(
                 utils.error_msg_from_exception(ex), status=cast(int, ex.code)
             )
+        except (exc.IntegrityError, exc.DatabaseError, exc.DataError) as ex:
+            logger.exception(ex)
+            return json_error_response(utils.error_msg_from_exception(ex), status=422)
         except Exception as ex:  # pylint: disable=broad-except
             logger.exception(ex)
             return json_error_response(utils.error_msg_from_exception(ex))
diff --git a/superset/views/base_api.py b/superset/views/base_api.py
index 260e5731788bc..01b462bb321f6 100644
--- a/superset/views/base_api.py
+++ b/superset/views/base_api.py
@@ -39,6 +39,7 @@
 from superset.stats_logger import BaseStatsLogger
 from superset.superset_typing import FlaskResponse
 from superset.utils.core import time_function
+from superset.views.base import handle_api_exception

 logger = logging.getLogger(__name__)

 get_related_schema = {
@@ -386,6 +387,7 @@ def send_stats_metrics(
         object_ref=False,
         log_to_statsd=False,
     )
+    @handle_api_exception
     def info_headless(self, **kwargs: Any) -> Response:
         """
         Add statsd metrics to builtin FAB _info endpoint
@@ -399,6 +401,7 @@ def info_headless(self, **kwargs: Any) -> Response:
         object_ref=False,
         log_to_statsd=False,
     )
+    @handle_api_exception
     def get_headless(self, pk: int, **kwargs: Any) -> Response:
         """
         Add statsd metrics to builtin FAB GET endpoint
@@ -412,6 +415,7 @@ def get_headless(self, pk: int, **kwargs: Any) -> Response:
         object_ref=False,
         log_to_statsd=False,
     )
+    @handle_api_exception
     def get_list_headless(self, **kwargs: Any) -> Response:
         """
         Add statsd metrics to builtin FAB GET list endpoint
@@ -425,6 +429,7 @@ def get_list_headless(self, **kwargs: Any) -> Response:
         object_ref=False,
         log_to_statsd=False,
     )
+    @handle_api_exception
     def post_headless(self) -> Response:
         """
         Add statsd metrics to builtin FAB POST endpoint
@@ -438,6 +443,7 @@ def post_headless(self) -> Response:
         object_ref=False,
         log_to_statsd=False,
     )
+    @handle_api_exception
     def put_headless(self, pk: int) -> Response:
         """
         Add statsd metrics to builtin FAB PUT endpoint
@@ -451,6 +457,7 @@ def put_headless(self, pk: int) -> Response:
         object_ref=False,
         log_to_statsd=False,
     )
+    @handle_api_exception
     def delete_headless(self, pk: int) -> Response:
         """
         Add statsd metrics to builtin FAB DELETE endpoint
@@ -464,6 +471,7 @@ def delete_headless(self, pk: int) -> Response:
     @safe
     @statsd_metrics
     @rison(get_related_schema)
+    @handle_api_exception
     def related(self,
column_name: str, **kwargs: Any) -> FlaskResponse: """Get related fields data --- @@ -542,6 +550,7 @@ def related(self, column_name: str, **kwargs: Any) -> FlaskResponse: @safe @statsd_metrics @rison(get_related_schema) + @handle_api_exception def distinct(self, column_name: str, **kwargs: Any) -> FlaskResponse: """Get distinct values from field data --- diff --git a/superset/views/core.py b/superset/views/core.py index 27a9a039b2d7b..68ac74b365852 100755 --- a/superset/views/core.py +++ b/superset/views/core.py @@ -1587,7 +1587,7 @@ def fave_dashboards(self, user_id: int) -> FlaskResponse: @event_logger.log_this @expose("/created_dashboards//", methods=["GET"]) def created_dashboards(self, user_id: int) -> FlaskResponse: - logging.warning( + logger.warning( "%s.created_dashboards " "This API endpoint is deprecated and will be removed in version 3.0.0", self.__class__.__name__, @@ -1926,6 +1926,8 @@ def dashboard( request.args.get(utils.ReservedUrlParameters.EDIT_MODE.value) == "true" ) + standalone_mode = ReservedUrlParameters.is_standalone_mode() + add_extra_log_payload( dashboard_id=dashboard.id, dashboard_version="v2", @@ -1944,6 +1946,7 @@ def dashboard( bootstrap_data=json.dumps( bootstrap_data, default=utils.pessimistic_json_iso_dttm_ser ), + standalone_mode=standalone_mode, ) @has_access diff --git a/tests/integration_tests/commands_test.py b/tests/integration_tests/commands_test.py index 5ff18b02a93e4..77fbad05f3a39 100644 --- a/tests/integration_tests/commands_test.py +++ b/tests/integration_tests/commands_test.py @@ -16,11 +16,11 @@ # under the License. import copy import json -from unittest.mock import patch import yaml +from flask import g -from superset import db, security_manager +from superset import db from superset.commands.exceptions import CommandInvalidError from superset.commands.importers.v1.assets import ImportAssetsCommand from superset.commands.importers.v1.utils import is_valid_config @@ -58,10 +58,13 @@ def test_is_valid_config(self): class TestImportAssetsCommand(SupersetTestCase): - @patch("superset.dashboards.commands.importers.v1.utils.g") - def test_import_assets(self, mock_g): + def setUp(self): + user = self.get_user("admin") + self.user = user + setattr(g, "user", user) + + def test_import_assets(self): """Test that we can import multiple assets""" - mock_g.user = security_manager.find_user("admin") contents = { "metadata.yaml": yaml.safe_dump(metadata_config), "databases/imported_database.yaml": yaml.safe_dump(database_config), @@ -141,7 +144,7 @@ def test_import_assets(self, mock_g): database = dataset.database assert str(database.uuid) == database_config["uuid"] - assert dashboard.owners == [mock_g.user] + assert dashboard.owners == [self.user] dashboard.owners = [] chart.owners = [] @@ -153,11 +156,8 @@ def test_import_assets(self, mock_g): db.session.delete(database) db.session.commit() - @patch("superset.dashboards.commands.importers.v1.utils.g") - def test_import_v1_dashboard_overwrite(self, mock_g): + def test_import_v1_dashboard_overwrite(self): """Test that assets can be overwritten""" - mock_g.user = security_manager.find_user("admin") - contents = { "metadata.yaml": yaml.safe_dump(metadata_config), "databases/imported_database.yaml": yaml.safe_dump(database_config), diff --git a/tests/integration_tests/dashboards/api_tests.py b/tests/integration_tests/dashboards/api_tests.py index afeab6e7db8e9..a027dcffae604 100644 --- a/tests/integration_tests/dashboards/api_tests.py +++ b/tests/integration_tests/dashboards/api_tests.py @@ -1796,6 +1796,8 
@@ def test_embedded_dashboards(self): self.assertNotEqual(result["uuid"], "") self.assertEqual(result["allowed_domains"], allowed_domains) + db.session.expire_all() + # get returns value resp = self.get_assert_metric(uri, "get_embedded") self.assertEqual(resp.status_code, 200) @@ -1810,9 +1812,13 @@ def test_embedded_dashboards(self): # put succeeds and returns value resp = self.post_assert_metric(uri, {"allowed_domains": []}, "set_embedded") self.assertEqual(resp.status_code, 200) + result = json.loads(resp.data.decode("utf-8"))["result"] + self.assertEqual(resp.status_code, 200) self.assertIsNotNone(result["uuid"]) self.assertNotEqual(result["uuid"], "") - self.assertEqual(result["allowed_domains"], allowed_domains) + self.assertEqual(result["allowed_domains"], []) + + db.session.expire_all() # get returns changed value resp = self.get_assert_metric(uri, "get_embedded") @@ -1825,6 +1831,8 @@ def test_embedded_dashboards(self): resp = self.delete_assert_metric(uri, "delete_embedded") self.assertEqual(resp.status_code, 200) + db.session.expire_all() + # get returns 404 resp = self.get_assert_metric(uri, "get_embedded") self.assertEqual(resp.status_code, 404) diff --git a/tests/integration_tests/databases/api_tests.py b/tests/integration_tests/databases/api_tests.py index 0c1dc27538d10..70640728ac352 100644 --- a/tests/integration_tests/databases/api_tests.py +++ b/tests/integration_tests/databases/api_tests.py @@ -1135,7 +1135,7 @@ def test_get_allow_file_upload_false_csv(self): uri = f"api/v1/database/?q={prison.dumps(arguments)}" rv = self.client.get(uri) data = json.loads(rv.data.decode("utf-8")) - assert data["count"] == 0 + assert data["count"] == 1 def test_get_allow_file_upload_filter_no_permission(self): """ diff --git a/tests/integration_tests/db_engine_specs/pinot_tests.py b/tests/integration_tests/db_engine_specs/pinot_tests.py index 803dd67cbacfa..c6e364a8ea5fe 100644 --- a/tests/integration_tests/db_engine_specs/pinot_tests.py +++ b/tests/integration_tests/db_engine_specs/pinot_tests.py @@ -45,6 +45,19 @@ def test_pinot_time_expression_simple_date_format_1d_grain(self): ), ) + def test_pinot_time_expression_simple_date_format_10m_grain(self): + col = column("tstamp") + expr = PinotEngineSpec.get_timestamp_expr(col, "%Y-%m-%d %H:%M:%S", "PT10M") + result = str(expr.compile()) + self.assertEqual( + result, + ( + "DATETIMECONVERT(tstamp, " + + "'1:SECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd HH:mm:ss', " + + "'1:SECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd HH:mm:ss', '10:MINUTES')" + ), + ) + def test_pinot_time_expression_simple_date_format_1w_grain(self): col = column("tstamp") expr = PinotEngineSpec.get_timestamp_expr(col, "%Y-%m-%d %H:%M:%S", "P1W") diff --git a/tests/integration_tests/fixtures/world_bank_dashboard.py b/tests/integration_tests/fixtures/world_bank_dashboard.py index 1ac1706a9dc05..e767036b7d857 100644 --- a/tests/integration_tests/fixtures/world_bank_dashboard.py +++ b/tests/integration_tests/fixtures/world_bank_dashboard.py @@ -111,11 +111,10 @@ def _commit_slices(slices: List[Slice]): def _create_world_bank_dashboard(table: SqlaTable, slices: List[Slice]) -> Dashboard: + from superset.examples.helpers import update_slice_ids from superset.examples.world_bank import dashboard_positions pos = dashboard_positions - from superset.examples.helpers import update_slice_ids - update_slice_ids(pos, slices) table.fetch_metadata() diff --git a/tests/integration_tests/migrations/ad07e4fdbaba_rm_time_range_endpoints_from_qc_3__test.py 
b/tests/integration_tests/migrations/ad07e4fdbaba_rm_time_range_endpoints_from_qc_3__test.py
new file mode 100644
index 0000000000000..f2abfa9766196
--- /dev/null
+++ b/tests/integration_tests/migrations/ad07e4fdbaba_rm_time_range_endpoints_from_qc_3__test.py
@@ -0,0 +1,85 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import json
+
+from superset.migrations.versions.ad07e4fdbaba_rm_time_range_endpoints_from_qc_3 import (
+    Slice,
+    upgrade_slice,
+)
+
+sample_query_context = {
+    "datasource": {"id": 27, "type": "table"},
+    "force": False,
+    "queries": [
+        {
+            "time_range": "No filter",
+            "filters": [],
+            "extras": {
+                "time_grain_sqla": "P1D",
+                "time_range_endpoints": ["inclusive", "exclusive"],
+                "having": "",
+                "having_druid": [],
+                "where": "",
+            },
+            "applied_time_extras": {},
+            "columns": ["a", "b"],
+            "orderby": [],
+            "annotation_layers": [],
+            "row_limit": 1000,
+            "timeseries_limit": 0,
+            "order_desc": True,
+            "url_params": {},
+            "custom_params": {},
+            "custom_form_data": {},
+            "post_processing": [],
+        }
+    ],
+    "form_data": {
+        "time_range_endpoints": ["inclusive", "exclusive"],
+    },
+    "result_format": "json",
+    "result_type": "full",
+}
+
+
+def test_upgrade():
+    slc = Slice(slice_name="FOO", query_context=json.dumps(sample_query_context))
+
+    upgrade_slice(slc)
+
+    query_context = json.loads(slc.query_context)
+    queries = query_context.get("queries")
+    for q in queries:
+        extras = q.get("extras", {})
+        assert "time_range_endpoints" not in extras
+
+    form_data = query_context.get("form_data", {})
+    assert "time_range_endpoints" not in form_data
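+
+
+# A minimal sketch of the expected end state (illustrative; it mirrors the
+# assertions in test_upgrade above):
+#
+#     slc = Slice(slice_name="FOO", query_context=json.dumps(sample_query_context))
+#     upgrade_slice(slc)
+#     assert "time_range_endpoints" not in slc.query_context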
"time_range_endpoints" not in form_data + + +def test_upgrade_bad_json(): + slc = Slice(slice_name="FOO", query_context="abc") + + assert None == upgrade_slice(slc) diff --git a/tests/integration_tests/sqla_models_tests.py b/tests/integration_tests/sqla_models_tests.py index bbe062e509ba9..d23b95f53cd3d 100644 --- a/tests/integration_tests/sqla_models_tests.py +++ b/tests/integration_tests/sqla_models_tests.py @@ -455,7 +455,8 @@ def test_fetch_metadata_for_updated_virtual_table(self): # make sure the columns have been mapped properly assert len(table.columns) == 4 - table.fetch_metadata() + table.fetch_metadata(commit=False) + # assert that the removed column has been dropped and # the physical and calculated columns are present assert {col.column_name for col in table.columns} == { @@ -473,6 +474,8 @@ def test_fetch_metadata_for_updated_virtual_table(self): assert VIRTUAL_TABLE_STRING_TYPES[backend].match(cols["mycase"].type) assert cols["expr"].expression == "case when 1 then 1 else 0 end" + db.session.delete(table) + @patch("superset.models.core.Database.db_engine_spec", BigQueryEngineSpec) def test_labels_expected_on_mutated_query(self): query_obj = { diff --git a/tests/integration_tests/utils_tests.py b/tests/integration_tests/utils_tests.py index 5add2c5f6e014..7e8aede6a39c7 100644 --- a/tests/integration_tests/utils_tests.py +++ b/tests/integration_tests/utils_tests.py @@ -15,7 +15,6 @@ # specific language governing permissions and limitations # under the License. # isort:skip_file -import unittest import uuid from datetime import date, datetime, time, timedelta from decimal import Decimal diff --git a/tests/unit_tests/conftest.py b/tests/unit_tests/conftest.py index 4987aaf0e0e5c..86fb0127b84f3 100644 --- a/tests/unit_tests/conftest.py +++ b/tests/unit_tests/conftest.py @@ -17,7 +17,7 @@ # pylint: disable=redefined-outer-name, import-outside-toplevel import importlib -from typing import Any, Iterator +from typing import Any, Callable, Iterator import pytest from pytest_mock import MockFixture @@ -31,25 +31,33 @@ @pytest.fixture -def session(mocker: MockFixture) -> Iterator[Session]: +def get_session(mocker: MockFixture) -> Callable[[], Session]: """ Create an in-memory SQLite session to test models. 
""" engine = create_engine("sqlite://") - Session_ = sessionmaker(bind=engine) # pylint: disable=invalid-name - in_memory_session = Session_() - # flask calls session.remove() - in_memory_session.remove = lambda: None + def get_session(): + Session_ = sessionmaker(bind=engine) # pylint: disable=invalid-name + in_memory_session = Session_() - # patch session - mocker.patch( - "superset.security.SupersetSecurityManager.get_session", - return_value=in_memory_session, - ) - mocker.patch("superset.db.session", in_memory_session) + # flask calls session.remove() + in_memory_session.remove = lambda: None - yield in_memory_session + # patch session + mocker.patch( + "superset.security.SupersetSecurityManager.get_session", + return_value=in_memory_session, + ) + mocker.patch("superset.db.session", in_memory_session) + return in_memory_session + + return get_session + + +@pytest.fixture +def session(get_session) -> Iterator[Session]: + yield get_session() @pytest.fixture(scope="module") diff --git a/tests/unit_tests/datasets/conftest.py b/tests/unit_tests/datasets/conftest.py new file mode 100644 index 0000000000000..9d9403934d0e1 --- /dev/null +++ b/tests/unit_tests/datasets/conftest.py @@ -0,0 +1,118 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from typing import Any, Dict, TYPE_CHECKING + +import pytest + +if TYPE_CHECKING: + from superset.connectors.sqla.models import SqlMetric, TableColumn + + +@pytest.fixture +def columns_default() -> Dict[str, Any]: + """Default props for new columns""" + return { + "changed_by": 1, + "created_by": 1, + "datasets": [], + "tables": [], + "is_additive": False, + "is_aggregation": False, + "is_dimensional": False, + "is_filterable": True, + "is_increase_desired": True, + "is_partition": False, + "is_physical": True, + "is_spatial": False, + "is_temporal": False, + "description": None, + "extra_json": "{}", + "unit": None, + "warning_text": None, + "is_managed_externally": False, + "external_url": None, + } + + +@pytest.fixture +def sample_columns() -> Dict["TableColumn", Dict[str, Any]]: + from superset.connectors.sqla.models import TableColumn + + return { + TableColumn(column_name="ds", is_dttm=1, type="TIMESTAMP"): { + "name": "ds", + "expression": "ds", + "type": "TIMESTAMP", + "is_temporal": True, + "is_physical": True, + }, + TableColumn(column_name="num_boys", type="INTEGER", groupby=True): { + "name": "num_boys", + "expression": "num_boys", + "type": "INTEGER", + "is_dimensional": True, + "is_physical": True, + }, + TableColumn(column_name="region", type="VARCHAR", groupby=True): { + "name": "region", + "expression": "region", + "type": "VARCHAR", + "is_dimensional": True, + "is_physical": True, + }, + TableColumn( + column_name="profit", + type="INTEGER", + groupby=False, + expression="revenue-expenses", + ): { + "name": "profit", + "expression": "revenue-expenses", + "type": "INTEGER", + "is_physical": False, + }, + } + + +@pytest.fixture +def sample_metrics() -> Dict["SqlMetric", Dict[str, Any]]: + from superset.connectors.sqla.models import SqlMetric + + return { + SqlMetric(metric_name="cnt", expression="COUNT(*)", metric_type="COUNT"): { + "name": "cnt", + "expression": "COUNT(*)", + "extra_json": '{"metric_type": "COUNT"}', + "type": "UNKNOWN", + "is_additive": True, + "is_aggregation": True, + "is_filterable": False, + "is_physical": False, + }, + SqlMetric( + metric_name="avg revenue", expression="AVG(revenue)", metric_type="AVG" + ): { + "name": "avg revenue", + "expression": "AVG(revenue)", + "extra_json": '{"metric_type": "AVG"}', + "type": "UNKNOWN", + "is_additive": False, + "is_aggregation": True, + "is_filterable": False, + "is_physical": False, + }, + } diff --git a/tests/unit_tests/datasets/test_models.py b/tests/unit_tests/datasets/test_models.py index d21ef8ea60a94..08e0f11e0d354 100644 --- a/tests/unit_tests/datasets/test_models.py +++ b/tests/unit_tests/datasets/test_models.py @@ -15,14 +15,17 @@ # specific language governing permissions and limitations # under the License. 
-# pylint: disable=import-outside-toplevel, unused-argument, unused-import, too-many-locals, invalid-name, too-many-lines - import json -from datetime import datetime, timezone +from typing import Any, Callable, Dict, List, TYPE_CHECKING from pytest_mock import MockFixture from sqlalchemy.orm.session import Session +from tests.unit_tests.utils.db import get_test_user + +if TYPE_CHECKING: + from superset.connectors.sqla.models import SqlMetric, TableColumn + def test_dataset_model(app_context: None, session: Session) -> None: """ @@ -50,6 +53,7 @@ def test_dataset_model(app_context: None, session: Session) -> None: session.flush() dataset = Dataset( + database=table.database, name="positions", expression=""" SELECT array_agg(array[longitude,latitude]) AS position @@ -148,6 +152,7 @@ def test_cascade_delete_dataset(app_context: None, session: Session) -> None: SELECT array_agg(array[longitude,latitude]) AS position FROM my_catalog.my_schema.my_table """, + database=table.database, tables=[table], columns=[ Column( @@ -185,7 +190,7 @@ def test_dataset_attributes(app_context: None, session: Session) -> None: columns = [ TableColumn(column_name="ds", is_dttm=1, type="TIMESTAMP"), - TableColumn(column_name="user_id", type="INTEGER"), + TableColumn(column_name="num_boys", type="INTEGER"), TableColumn(column_name="revenue", type="INTEGER"), TableColumn(column_name="expenses", type="INTEGER"), TableColumn( @@ -254,6 +259,7 @@ def test_dataset_attributes(app_context: None, session: Session) -> None: "main_dttm_col", "metrics", "offset", + "owners", "params", "perm", "schema", @@ -265,7 +271,13 @@ def test_dataset_attributes(app_context: None, session: Session) -> None: ] -def test_create_physical_sqlatable(app_context: None, session: Session) -> None: +def test_create_physical_sqlatable( + app_context: None, + session: Session, + sample_columns: Dict["TableColumn", Dict[str, Any]], + sample_metrics: Dict["SqlMetric", Dict[str, Any]], + columns_default: Dict[str, Any], +) -> None: """ Test shadow write when creating a new ``SqlaTable``. 
@@ -274,7 +286,7 @@ def test_create_physical_sqlatable(app_context: None, session: Session) -> None: """ from superset.columns.models import Column from superset.columns.schemas import ColumnSchema - from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn + from superset.connectors.sqla.models import SqlaTable from superset.datasets.models import Dataset from superset.datasets.schemas import DatasetSchema from superset.models.core import Database @@ -283,19 +295,11 @@ def test_create_physical_sqlatable(app_context: None, session: Session) -> None: engine = session.get_bind() Dataset.metadata.create_all(engine) # pylint: disable=no-member - - columns = [ - TableColumn(column_name="ds", is_dttm=1, type="TIMESTAMP"), - TableColumn(column_name="user_id", type="INTEGER"), - TableColumn(column_name="revenue", type="INTEGER"), - TableColumn(column_name="expenses", type="INTEGER"), - TableColumn( - column_name="profit", type="INTEGER", expression="revenue-expenses" - ), - ] - metrics = [ - SqlMetric(metric_name="cnt", expression="COUNT(*)"), - ] + user1 = get_test_user(1, "abc") + columns = list(sample_columns.keys()) + metrics = list(sample_metrics.keys()) + expected_table_columns = list(sample_columns.values()) + expected_metric_columns = list(sample_metrics.values()) sqla_table = SqlaTable( table_name="old_dataset", @@ -317,6 +321,9 @@ def test_create_physical_sqlatable(app_context: None, session: Session) -> None: "import_time": 1606677834, } ), + created_by=user1, + changed_by=user1, + owners=[user1], perm=None, filter_select_enabled=1, fetch_values_predicate="foo IN (1, 2)", @@ -329,164 +336,85 @@ def test_create_physical_sqlatable(app_context: None, session: Session) -> None: session.flush() # ignore these keys when comparing results - ignored_keys = {"created_on", "changed_on", "uuid"} + ignored_keys = {"created_on", "changed_on"} # check that columns were created column_schema = ColumnSchema() - column_schemas = [ + actual_columns = [ {k: v for k, v in column_schema.dump(column).items() if k not in ignored_keys} for column in session.query(Column).all() ] - assert column_schemas == [ - { - "changed_by": None, - "created_by": None, - "description": None, - "expression": "ds", - "extra_json": "{}", - "id": 1, - "is_increase_desired": True, - "is_additive": False, - "is_aggregation": False, - "is_partition": False, - "is_physical": True, - "is_spatial": False, - "is_temporal": True, - "name": "ds", - "type": "TIMESTAMP", - "unit": None, - "warning_text": None, - "is_managed_externally": False, - "external_url": None, - }, - { - "changed_by": None, - "created_by": None, - "description": None, - "expression": "user_id", - "extra_json": "{}", - "id": 2, - "is_increase_desired": True, - "is_additive": False, - "is_aggregation": False, - "is_partition": False, - "is_physical": True, - "is_spatial": False, - "is_temporal": False, - "name": "user_id", - "type": "INTEGER", - "unit": None, - "warning_text": None, - "is_managed_externally": False, - "external_url": None, - }, - { - "changed_by": None, - "created_by": None, - "description": None, - "expression": "revenue", - "extra_json": "{}", - "id": 3, - "is_increase_desired": True, - "is_additive": False, - "is_aggregation": False, - "is_partition": False, - "is_physical": True, - "is_spatial": False, - "is_temporal": False, - "name": "revenue", - "type": "INTEGER", - "unit": None, - "warning_text": None, - "is_managed_externally": False, - "external_url": None, - }, - { - "changed_by": None, - "created_by": None, - 
"description": None, - "expression": "expenses", - "extra_json": "{}", - "id": 4, - "is_increase_desired": True, - "is_additive": False, - "is_aggregation": False, - "is_partition": False, + num_physical_columns = len( + [col for col in expected_table_columns if col.get("is_physical") == True] + ) + num_dataset_table_columns = len(columns) + num_dataset_metric_columns = len(metrics) + assert ( + len(actual_columns) + == num_physical_columns + num_dataset_table_columns + num_dataset_metric_columns + ) + + # table columns are created before dataset columns are created + offset = 0 + for i in range(num_physical_columns): + assert actual_columns[i + offset] == { + **columns_default, + **expected_table_columns[i], + "id": i + offset + 1, + # physical columns for table have its own uuid + "uuid": actual_columns[i + offset]["uuid"], "is_physical": True, - "is_spatial": False, - "is_temporal": False, - "name": "expenses", - "type": "INTEGER", - "unit": None, - "warning_text": None, - "is_managed_externally": False, - "external_url": None, - }, - { - "changed_by": None, - "created_by": None, - "description": None, - "expression": "revenue-expenses", - "extra_json": "{}", - "id": 5, - "is_increase_desired": True, - "is_additive": False, - "is_aggregation": False, - "is_partition": False, - "is_physical": False, - "is_spatial": False, - "is_temporal": False, - "name": "profit", - "type": "INTEGER", - "unit": None, - "warning_text": None, - "is_managed_externally": False, - "external_url": None, - }, - { - "changed_by": None, + # table columns do not have creators "created_by": None, - "description": None, - "expression": "COUNT(*)", - "extra_json": "{}", - "id": 6, - "is_increase_desired": True, - "is_additive": False, - "is_aggregation": True, - "is_partition": False, - "is_physical": False, - "is_spatial": False, - "is_temporal": False, - "name": "cnt", - "type": "Unknown", - "unit": None, - "warning_text": None, - "is_managed_externally": False, - "external_url": None, - }, - ] + "tables": [1], + } + + offset += num_physical_columns + for i, column in enumerate(sqla_table.columns): + assert actual_columns[i + offset] == { + **columns_default, + **expected_table_columns[i], + "id": i + offset + 1, + # columns for dataset reuses the same uuid of TableColumn + "uuid": str(column.uuid), + "datasets": [1], + } + + offset += num_dataset_table_columns + for i, metric in enumerate(sqla_table.metrics): + assert actual_columns[i + offset] == { + **columns_default, + **expected_metric_columns[i], + "id": i + offset + 1, + "uuid": str(metric.uuid), + "datasets": [1], + } # check that table was created table_schema = TableSchema() tables = [ - {k: v for k, v in table_schema.dump(table).items() if k not in ignored_keys} - for table in session.query(Table).all() - ] - assert tables == [ { - "extra_json": "{}", - "catalog": None, - "schema": "my_schema", - "name": "old_dataset", - "id": 1, - "database": 1, - "columns": [1, 2, 3, 4], - "created_by": None, - "changed_by": None, - "is_managed_externally": False, - "external_url": None, + k: v + for k, v in table_schema.dump(table).items() + if k not in (ignored_keys | {"uuid"}) } + for table in session.query(Table).all() ] + assert len(tables) == 1 + assert tables[0] == { + "id": 1, + "database": 1, + "created_by": 1, + "changed_by": 1, + "datasets": [1], + "columns": [1, 2, 3], + "extra_json": "{}", + "catalog": None, + "schema": "my_schema", + "name": "old_dataset", + "is_managed_externally": False, + "external_url": None, + } # check that dataset was created 
dataset_schema = DatasetSchema() @@ -494,26 +422,32 @@ def test_create_physical_sqlatable(app_context: None, session: Session) -> None: {k: v for k, v in dataset_schema.dump(dataset).items() if k not in ignored_keys} for dataset in session.query(Dataset).all() ] - assert datasets == [ - { - "id": 1, - "sqlatable_id": 1, - "name": "old_dataset", - "changed_by": None, - "created_by": None, - "columns": [1, 2, 3, 4, 5, 6], - "is_physical": True, - "tables": [1], - "extra_json": "{}", - "expression": "old_dataset", - "is_managed_externally": False, - "external_url": None, - } - ] + assert len(datasets) == 1 + assert datasets[0] == { + "id": 1, + "uuid": str(sqla_table.uuid), + "created_by": 1, + "changed_by": 1, + "owners": [1], + "name": "old_dataset", + "columns": [4, 5, 6, 7, 8, 9], + "is_physical": True, + "database": 1, + "tables": [1], + "extra_json": "{}", + "expression": "old_dataset", + "is_managed_externally": False, + "external_url": None, + } def test_create_virtual_sqlatable( - mocker: MockFixture, app_context: None, session: Session + app_context: None, + mocker: MockFixture, + session: Session, + sample_columns: Dict["TableColumn", Dict[str, Any]], + sample_metrics: Dict["SqlMetric", Dict[str, Any]], + columns_default: Dict[str, Any], ) -> None: """ Test shadow write when creating a new ``SqlaTable``. @@ -528,7 +462,7 @@ def test_create_virtual_sqlatable( from superset.columns.models import Column from superset.columns.schemas import ColumnSchema - from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn + from superset.connectors.sqla.models import SqlaTable from superset.datasets.models import Dataset from superset.datasets.schemas import DatasetSchema from superset.models.core import Database @@ -536,8 +470,20 @@ def test_create_virtual_sqlatable( engine = session.get_bind() Dataset.metadata.create_all(engine) # pylint: disable=no-member - - # create the ``Table`` that the virtual dataset points to + user1 = get_test_user(1, "abc") + physical_table_columns: List[Dict[str, Any]] = [ + dict( + name="ds", + is_temporal=True, + type="TIMESTAMP", + expression="ds", + is_physical=True, + ), + dict(name="num_boys", type="INTEGER", expression="num_boys", is_physical=True), + dict(name="revenue", type="INTEGER", expression="revenue", is_physical=True), + dict(name="expenses", type="INTEGER", expression="expenses", is_physical=True), + ] + # create a physical ``Table`` that the virtual dataset points to database = Database(database_name="my_database", sqlalchemy_uri="sqlite://") table = Table( name="some_table", @@ -545,30 +491,26 @@ def test_create_virtual_sqlatable( catalog=None, database=database, columns=[ - Column(name="ds", is_temporal=True, type="TIMESTAMP"), - Column(name="user_id", type="INTEGER"), - Column(name="revenue", type="INTEGER"), - Column(name="expenses", type="INTEGER"), + Column(**props, created_by=user1, changed_by=user1) + for props in physical_table_columns ], ) session.add(table) session.commit() + assert session.query(Table).count() == 1 + assert session.query(Dataset).count() == 0 + # create virtual dataset - columns = [ - TableColumn(column_name="ds", is_dttm=1, type="TIMESTAMP"), - TableColumn(column_name="user_id", type="INTEGER"), - TableColumn(column_name="revenue", type="INTEGER"), - TableColumn(column_name="expenses", type="INTEGER"), - TableColumn( - column_name="profit", type="INTEGER", expression="revenue-expenses" - ), - ] - metrics = [ - SqlMetric(metric_name="cnt", expression="COUNT(*)"), - ] + columns = 
list(sample_columns.keys()) + metrics = list(sample_metrics.keys()) + expected_table_columns = list(sample_columns.values()) + expected_metric_columns = list(sample_metrics.values()) sqla_table = SqlaTable( + created_by=user1, + changed_by=user1, + owners=[user1], table_name="old_dataset", columns=columns, metrics=metrics, @@ -583,7 +525,7 @@ def test_create_virtual_sqlatable( sql=""" SELECT ds, - user_id, + num_boys, revenue, expenses, revenue - expenses AS profit @@ -607,227 +549,54 @@ def test_create_virtual_sqlatable( session.add(sqla_table) session.flush() - # ignore these keys when comparing results - ignored_keys = {"created_on", "changed_on", "uuid"} + # should not add a new table + assert session.query(Table).count() == 1 + assert session.query(Dataset).count() == 1 - # check that columns were created + # ignore these keys when comparing results + ignored_keys = {"created_on", "changed_on"} column_schema = ColumnSchema() - column_schemas = [ + actual_columns = [ {k: v for k, v in column_schema.dump(column).items() if k not in ignored_keys} for column in session.query(Column).all() ] - assert column_schemas == [ - { - "type": "TIMESTAMP", - "is_additive": False, - "extra_json": "{}", - "is_partition": False, - "expression": None, - "unit": None, - "warning_text": None, - "created_by": None, - "is_increase_desired": True, - "description": None, - "is_spatial": False, - "name": "ds", - "is_physical": True, - "changed_by": None, - "is_temporal": True, - "id": 1, - "is_aggregation": False, - "external_url": None, - "is_managed_externally": False, - }, - { - "type": "INTEGER", - "is_additive": False, - "extra_json": "{}", - "is_partition": False, - "expression": None, - "unit": None, - "warning_text": None, - "created_by": None, - "is_increase_desired": True, - "description": None, - "is_spatial": False, - "name": "user_id", - "is_physical": True, - "changed_by": None, - "is_temporal": False, - "id": 2, - "is_aggregation": False, - "external_url": None, - "is_managed_externally": False, - }, - { - "type": "INTEGER", - "is_additive": False, - "extra_json": "{}", - "is_partition": False, - "expression": None, - "unit": None, - "warning_text": None, - "created_by": None, - "is_increase_desired": True, - "description": None, - "is_spatial": False, - "name": "revenue", - "is_physical": True, - "changed_by": None, - "is_temporal": False, - "id": 3, - "is_aggregation": False, - "external_url": None, - "is_managed_externally": False, - }, - { - "type": "INTEGER", - "is_additive": False, - "extra_json": "{}", - "is_partition": False, - "expression": None, - "unit": None, - "warning_text": None, - "created_by": None, - "is_increase_desired": True, - "description": None, - "is_spatial": False, - "name": "expenses", - "is_physical": True, - "changed_by": None, - "is_temporal": False, - "id": 4, - "is_aggregation": False, - "external_url": None, - "is_managed_externally": False, - }, - { - "type": "TIMESTAMP", - "is_additive": False, - "extra_json": "{}", - "is_partition": False, - "expression": "ds", - "unit": None, - "warning_text": None, - "created_by": None, - "is_increase_desired": True, - "description": None, - "is_spatial": False, - "name": "ds", - "is_physical": False, - "changed_by": None, - "is_temporal": True, - "id": 5, - "is_aggregation": False, - "external_url": None, - "is_managed_externally": False, - }, - { - "type": "INTEGER", - "is_additive": False, - "extra_json": "{}", - "is_partition": False, - "expression": "user_id", - "unit": None, - "warning_text": None, - "created_by": 
None, - "is_increase_desired": True, - "description": None, - "is_spatial": False, - "name": "user_id", - "is_physical": False, - "changed_by": None, - "is_temporal": False, - "id": 6, - "is_aggregation": False, - "external_url": None, - "is_managed_externally": False, - }, - { - "type": "INTEGER", - "is_additive": False, - "extra_json": "{}", - "is_partition": False, - "expression": "revenue", - "unit": None, - "warning_text": None, - "created_by": None, - "is_increase_desired": True, - "description": None, - "is_spatial": False, - "name": "revenue", - "is_physical": False, - "changed_by": None, - "is_temporal": False, - "id": 7, - "is_aggregation": False, - "external_url": None, - "is_managed_externally": False, - }, - { - "type": "INTEGER", - "is_additive": False, - "extra_json": "{}", - "is_partition": False, - "expression": "expenses", - "unit": None, - "warning_text": None, - "created_by": None, - "is_increase_desired": True, - "description": None, - "is_spatial": False, - "name": "expenses", - "is_physical": False, - "changed_by": None, - "is_temporal": False, - "id": 8, - "is_aggregation": False, - "external_url": None, - "is_managed_externally": False, - }, - { - "type": "INTEGER", - "is_additive": False, - "extra_json": "{}", - "is_partition": False, - "expression": "revenue-expenses", - "unit": None, - "warning_text": None, - "created_by": None, - "is_increase_desired": True, - "description": None, - "is_spatial": False, - "name": "profit", - "is_physical": False, - "changed_by": None, - "is_temporal": False, - "id": 9, - "is_aggregation": False, - "external_url": None, - "is_managed_externally": False, - }, - { - "type": "Unknown", - "is_additive": False, - "extra_json": "{}", - "is_partition": False, - "expression": "COUNT(*)", - "unit": None, - "warning_text": None, - "created_by": None, - "is_increase_desired": True, - "description": None, - "is_spatial": False, - "name": "cnt", + num_physical_columns = len(physical_table_columns) + num_dataset_table_columns = len(columns) + num_dataset_metric_columns = len(metrics) + assert ( + len(actual_columns) + == num_physical_columns + num_dataset_table_columns + num_dataset_metric_columns + ) + + for i, column in enumerate(table.columns): + assert actual_columns[i] == { + **columns_default, + **physical_table_columns[i], + "id": i + 1, + "uuid": str(column.uuid), + "tables": [1], + } + + offset = num_physical_columns + for i, column in enumerate(sqla_table.columns): + assert actual_columns[i + offset] == { + **columns_default, + **expected_table_columns[i], + "id": i + offset + 1, + "uuid": str(column.uuid), "is_physical": False, - "changed_by": None, - "is_temporal": False, - "id": 10, - "is_aggregation": True, - "external_url": None, - "is_managed_externally": False, - }, - ] + "datasets": [1], + } + + offset = num_physical_columns + num_dataset_table_columns + for i, metric in enumerate(sqla_table.metrics): + assert actual_columns[i + offset] == { + **columns_default, + **expected_metric_columns[i], + "id": i + offset + 1, + "uuid": str(metric.uuid), + "datasets": [1], + } # check that dataset was created, and has a reference to the table dataset_schema = DatasetSchema() @@ -835,30 +604,31 @@ def test_create_virtual_sqlatable( {k: v for k, v in dataset_schema.dump(dataset).items() if k not in ignored_keys} for dataset in session.query(Dataset).all() ] - assert datasets == [ - { - "id": 1, - "sqlatable_id": 1, - "name": "old_dataset", - "changed_by": None, - "created_by": None, - "columns": [5, 6, 7, 8, 9, 10], - "is_physical": 
False,
-            "tables": [1],
-            "extra_json": "{}",
-            "external_url": None,
-            "is_managed_externally": False,
-            "expression": """
+    assert len(datasets) == 1
+    assert datasets[0] == {
+        "id": 1,
+        "database": 1,
+        "uuid": str(sqla_table.uuid),
+        "name": "old_dataset",
+        "changed_by": 1,
+        "created_by": 1,
+        "owners": [1],
+        "columns": [5, 6, 7, 8, 9, 10],
+        "is_physical": False,
+        "tables": [1],
+        "extra_json": "{}",
+        "external_url": None,
+        "is_managed_externally": False,
+        "expression": """
 SELECT
   ds,
-  user_id,
+  num_boys,
   revenue,
   expenses,
   revenue - expenses AS profit
 FROM
   some_table""",
-        }
-    ]
+    }


 def test_delete_sqlatable(app_context: None, session: Session) -> None:
@@ -886,18 +656,21 @@ def test_delete_sqlatable(app_context: None, session: Session) -> None:
     session.add(sqla_table)
     session.flush()

-    datasets = session.query(Dataset).all()
-    assert len(datasets) == 1
+    assert session.query(Dataset).count() == 1
+    assert session.query(Table).count() == 1
+    assert session.query(Column).count() == 2

     session.delete(sqla_table)
     session.flush()

-    # test that dataset was also deleted
-    datasets = session.query(Dataset).all()
-    assert len(datasets) == 0
+    # test that dataset and dataset columns are also deleted
+    # but the physical table and table columns are kept
+    assert session.query(Dataset).count() == 0
+    assert session.query(Table).count() == 1
+    assert session.query(Column).count() == 1


-def test_update_sqlatable(
+def test_update_physical_sqlatable_columns(
     mocker: MockFixture, app_context: None, session: Session
 ) -> None:
     """
@@ -929,21 +702,33 @@ def test_update_sqlatable(
     session.add(sqla_table)
     session.flush()

+    assert session.query(Table).count() == 1
+    assert session.query(Dataset).count() == 1
+    assert session.query(Column).count() == 2  # 1 for table, 1 for dataset
+
     dataset = session.query(Dataset).one()
     assert len(dataset.columns) == 1

     # add a column to the original ``SqlaTable`` instance
-    sqla_table.columns.append(TableColumn(column_name="user_id", type="INTEGER"))
+    sqla_table.columns.append(TableColumn(column_name="num_boys", type="INTEGER"))
     session.flush()

-    # check that the column was added to the dataset
+    # check that the column was added to the dataset and the added columns
+    # have the correct uuid
+    assert session.query(Column).count() == 3
     dataset = session.query(Dataset).one()
     assert len(dataset.columns) == 2
+    for table_column, dataset_column in zip(sqla_table.columns, dataset.columns):
+        assert table_column.uuid == dataset_column.uuid

     # delete the column in the original instance
     sqla_table.columns = sqla_table.columns[1:]
     session.flush()
+ assert session.query(TableColumn).count() == 1 + # the extra Dataset.column is deleted, but Table.column is kept + assert session.query(Column).count() == 2 + # check that the column was also removed from the dataset dataset = session.query(Dataset).one() assert len(dataset.columns) == 1 @@ -957,7 +742,7 @@ def test_update_sqlatable( assert dataset.columns[0].is_temporal is True -def test_update_sqlatable_schema( +def test_update_physical_sqlatable_schema( mocker: MockFixture, app_context: None, session: Session ) -> None: """ @@ -1003,8 +788,11 @@ def test_update_sqlatable_schema( assert new_dataset.tables[0].id == 2 -def test_update_sqlatable_metric( - mocker: MockFixture, app_context: None, session: Session +def test_update_physical_sqlatable_metrics( + mocker: MockFixture, + app_context: None, + session: Session, + get_session: Callable[[], Session], ) -> None: """ Test that updating a ``SqlaTable`` also updates the corresponding ``Dataset``. @@ -1042,6 +830,9 @@ def test_update_sqlatable_metric( session.flush() # check that the metric was created + # 1 physical column for table + (1 column + 1 metric for datasets) + assert session.query(Column).count() == 3 + column = session.query(Column).filter_by(is_physical=False).one() assert column.expression == "COUNT(*)" @@ -1051,111 +842,35 @@ def test_update_sqlatable_metric( assert column.expression == "MAX(ds)" - -def test_update_virtual_sqlatable_references( - mocker: MockFixture, app_context: None, session: Session -) -> None: - """ - Test that changing the SQL of a virtual ``SqlaTable`` updates ``Dataset``. - - When the SQL is modified the list of referenced tables should be updated in the new - ``Dataset`` model. - """ - # patch session - mocker.patch( - "superset.security.SupersetSecurityManager.get_session", return_value=session - ) - - from superset.columns.models import Column - from superset.connectors.sqla.models import SqlaTable, TableColumn - from superset.datasets.models import Dataset - from superset.models.core import Database - from superset.tables.models import Table - - engine = session.get_bind() - Dataset.metadata.create_all(engine) # pylint: disable=no-member - - database = Database(database_name="my_database", sqlalchemy_uri="sqlite://") - table1 = Table( - name="table_a", - schema="my_schema", - catalog=None, - database=database, - columns=[Column(name="a", type="INTEGER")], - ) - table2 = Table( - name="table_b", - schema="my_schema", - catalog=None, - database=database, - columns=[Column(name="b", type="INTEGER")], - ) - session.add(table1) - session.add(table2) - session.commit() - - # create virtual dataset - columns = [TableColumn(column_name="a", type="INTEGER")] - - sqla_table = SqlaTable( - table_name="old_dataset", - columns=columns, - database=database, - schema="my_schema", - sql="SELECT a FROM table_a", + # in a new session, update new columns and metrics at the same time + # reload the sqla_table so we can test the case that accessing an not already + # loaded attribute (`sqla_table.metrics`) while there are updates on the instance + # may trigger `after_update` before the attribute is loaded + session = get_session() + sqla_table = session.query(SqlaTable).filter(SqlaTable.id == sqla_table.id).one() + sqla_table.columns.append( + TableColumn( + column_name="another_column", + is_dttm=0, + type="TIMESTAMP", + expression="concat('a', 'b')", + ) ) - session.add(sqla_table) - session.flush() - - # check that new dataset has table1 - dataset = session.query(Dataset).one() - assert dataset.tables == 
[table1]
-
-    # change SQL
-    sqla_table.sql = "SELECT a, b FROM table_a JOIN table_b"
-    session.flush()
-
-    # check that new dataset has both tables
-    new_dataset = session.query(Dataset).one()
-    assert new_dataset.tables == [table1, table2]
-    assert new_dataset.expression == "SELECT a, b FROM table_a JOIN table_b"
-
-
-def test_quote_expressions(app_context: None, session: Session) -> None:
-    """
-    Test that expressions are quoted appropriately in columns and datasets.
-    """
-    from superset.columns.models import Column
-    from superset.connectors.sqla.models import SqlaTable, TableColumn
-    from superset.datasets.models import Dataset
-    from superset.models.core import Database
-    from superset.tables.models import Table
-
-    engine = session.get_bind()
-    Dataset.metadata.create_all(engine)  # pylint: disable=no-member
-
-    columns = [
-        TableColumn(column_name="has space", type="INTEGER"),
-        TableColumn(column_name="no_need", type="INTEGER"),
-    ]
-
-    sqla_table = SqlaTable(
-        table_name="old dataset",
-        columns=columns,
-        metrics=[],
-        database=Database(database_name="my_database", sqlalchemy_uri="sqlite://"),
+    # in a new session, update new columns and metrics at the same time;
+    # reload the sqla_table so we can test the case where accessing a
+    # not-yet-loaded attribute (`sqla_table.metrics`) while there are pending
+    # updates on the instance may trigger `after_update` before the attribute is loaded
+    session = get_session()
+    sqla_table = session.query(SqlaTable).filter(SqlaTable.id == sqla_table.id).one()
+    sqla_table.columns.append(
+        TableColumn(
+            column_name="another_column",
+            is_dttm=0,
+            type="TIMESTAMP",
+            expression="concat('a', 'b')",
+        )
     )
-    session.add(sqla_table)
+    # Here `SqlaTable.after_update` is triggered
+    # before `sqla_table.metrics` is loaded
+    sqla_table.metrics.append(
+        SqlMetric(metric_name="another_metric", expression="COUNT(*)")
+    )
+    # `SqlaTable.after_update` will be triggered again at flush time
     session.flush()
-
-    dataset = session.query(Dataset).one()
-    assert dataset.expression == '"old dataset"'
-    assert dataset.columns[0].expression == '"has space"'
-    assert dataset.columns[1].expression == "no_need"
+    assert session.query(Column).count() == 5


-def test_update_physical_sqlatable(
-    mocker: MockFixture, app_context: None, session: Session
+def test_update_physical_sqlatable_database(
+    mocker: MockFixture,
+    app_context: None,
+    session: Session,
+    get_session: Callable[[], Session],
 ) -> None:
     """
     Test updating the table on a physical dataset.
@@ -1172,9 +887,9 @@ def test_update_physical_sqlatable( from superset.columns.models import Column from superset.connectors.sqla.models import SqlaTable, TableColumn - from superset.datasets.models import Dataset + from superset.datasets.models import Dataset, dataset_column_association_table from superset.models.core import Database - from superset.tables.models import Table + from superset.tables.models import Table, table_column_association_table from superset.tables.schemas import TableSchema engine = session.get_bind() @@ -1184,19 +899,26 @@ def test_update_physical_sqlatable( TableColumn(column_name="a", type="INTEGER"), ] + original_database = Database( + database_name="my_database", sqlalchemy_uri="sqlite://" + ) sqla_table = SqlaTable( - table_name="old_dataset", + table_name="original_table", columns=columns, metrics=[], - database=Database(database_name="my_database", sqlalchemy_uri="sqlite://"), + database=original_database, ) session.add(sqla_table) session.flush() + assert session.query(Table).count() == 1 + assert session.query(Dataset).count() == 1 + assert session.query(Column).count() == 2 # 1 for table, 1 for dataset + # check that the table was created, and that the created dataset points to it table = session.query(Table).one() assert table.id == 1 - assert table.name == "old_dataset" + assert table.name == "original_table" assert table.schema is None assert table.database_id == 1 @@ -1210,122 +932,200 @@ def test_update_physical_sqlatable( session.add(new_database) session.flush() sqla_table.database = new_database + sqla_table.table_name = "new_table" session.flush() + assert session.query(Dataset).count() == 1 + assert session.query(Table).count() == 2 + # is kept for the old table + # is kept for the updated dataset + # is created for the new table + assert session.query(Column).count() == 3 + # ignore these keys when comparing results ignored_keys = {"created_on", "changed_on", "uuid"} # check that the old table still exists, and that the dataset points to the newly - # created table (id=2) and column (id=2), on the new database (also id=2) + # created table, column and dataset table_schema = TableSchema() tables = [ {k: v for k, v in table_schema.dump(table).items() if k not in ignored_keys} for table in session.query(Table).all() ] - assert tables == [ - { - "created_by": None, - "extra_json": "{}", - "name": "old_dataset", - "changed_by": None, - "catalog": None, - "columns": [1], - "database": 1, - "external_url": None, - "schema": None, - "id": 1, - "is_managed_externally": False, - }, - { - "created_by": None, - "extra_json": "{}", - "name": "old_dataset", - "changed_by": None, - "catalog": None, - "columns": [2], - "database": 2, - "external_url": None, - "schema": None, - "id": 2, - "is_managed_externally": False, - }, - ] + assert tables[0] == { + "id": 1, + "database": 1, + "columns": [1], + "datasets": [], + "created_by": None, + "changed_by": None, + "extra_json": "{}", + "catalog": None, + "schema": None, + "name": "original_table", + "external_url": None, + "is_managed_externally": False, + } + assert tables[1] == { + "id": 2, + "database": 2, + "datasets": [1], + "columns": [3], + "created_by": None, + "changed_by": None, + "catalog": None, + "schema": None, + "name": "new_table", + "is_managed_externally": False, + "extra_json": "{}", + "external_url": None, + } # check that dataset now points to the new table assert dataset.tables[0].database_id == 2 + # and a new column is created + assert len(dataset.columns) == 1 + assert dataset.columns[0].id 
== 2

     # point ``SqlaTable`` back
-    sqla_table.database_id = 1
+    sqla_table.database = original_database
+    sqla_table.table_name = "original_table"
     session.flush()

-    # check that dataset points to the original table
+    # should not create more tables or datasets
+    assert session.query(Dataset).count() == 1
+    assert session.query(Table).count() == 2
+    # column 1 is kept for the old table
+    # column 2 is kept for the updated dataset
+    # column 3 is kept for the new table
+    assert session.query(Column.id).order_by(Column.id).all() == [
+        (1,),
+        (2,),
+        (3,),
+    ]
+    assert session.query(dataset_column_association_table).all() == [(1, 2)]
+    assert session.query(table_column_association_table).all() == [(1, 1), (2, 3)]
+    assert session.query(Dataset).filter_by(id=1).one().columns[0].id == 2
+    assert session.query(Table).filter_by(id=2).one().columns[0].id == 3
+    assert session.query(Table).filter_by(id=1).one().columns[0].id == 1
+
+    # the dataset points back to the original table
     assert dataset.tables[0].database_id == 1
+    assert dataset.tables[0].name == "original_table"
+
+    # the original column is kept
+    assert dataset.columns[0].id == 2
+    session.commit()
+    session.close()

+    # querying in a new session should still return the same result
+    session = get_session()
+    assert session.query(table_column_association_table).all() == [(1, 1), (2, 3)]


-def test_update_physical_sqlatable_no_dataset(
+
+def test_update_virtual_sqlatable_references(
     mocker: MockFixture, app_context: None, session: Session
 ) -> None:
     """
-    Test updating the table on a physical dataset that it creates
-    a new dataset if one didn't already exist.
+    Test that changing the SQL of a virtual ``SqlaTable`` updates ``Dataset``.

-    When updating the table on a physical dataset by pointing it somewhere else (change
-    in database ID, schema, or table name) we should point the ``Dataset`` to an
-    existing ``Table`` if possible, and create a new one otherwise.
+    When the SQL is modified, the list of referenced tables should be updated in the
+    new ``Dataset`` model.
""" # patch session mocker.patch( "superset.security.SupersetSecurityManager.get_session", return_value=session ) - mocker.patch("superset.datasets.dao.db.session", session) from superset.columns.models import Column from superset.connectors.sqla.models import SqlaTable, TableColumn from superset.datasets.models import Dataset from superset.models.core import Database from superset.tables.models import Table - from superset.tables.schemas import TableSchema engine = session.get_bind() Dataset.metadata.create_all(engine) # pylint: disable=no-member - columns = [ - TableColumn(column_name="a", type="INTEGER"), - ] + database = Database(database_name="my_database", sqlalchemy_uri="sqlite://") + table1 = Table( + name="table_a", + schema="my_schema", + catalog=None, + database=database, + columns=[Column(name="a", type="INTEGER")], + ) + table2 = Table( + name="table_b", + schema="my_schema", + catalog=None, + database=database, + columns=[Column(name="b", type="INTEGER")], + ) + session.add(table1) + session.add(table2) + session.commit() + + # create virtual dataset + columns = [TableColumn(column_name="a", type="INTEGER")] sqla_table = SqlaTable( table_name="old_dataset", columns=columns, - metrics=[], - database=Database(database_name="my_database", sqlalchemy_uri="sqlite://"), + database=database, + schema="my_schema", + sql="SELECT a FROM table_a", ) session.add(sqla_table) session.flush() - # check that the table was created - table = session.query(Table).one() - assert table.id == 1 - - dataset = session.query(Dataset).one() - assert dataset.tables == [table] + # check that new dataset has table1 + dataset: Dataset = session.query(Dataset).one() + assert dataset.tables == [table1] - # point ``SqlaTable`` to a different database - new_database = Database( - database_name="my_other_database", sqlalchemy_uri="sqlite://" - ) - session.add(new_database) + # change SQL + sqla_table.sql = "SELECT a, b FROM table_a JOIN table_b" session.flush() - sqla_table.database = new_database + + # check that new dataset has both tables + new_dataset: Dataset = session.query(Dataset).one() + assert new_dataset.tables == [table1, table2] + assert new_dataset.expression == "SELECT a, b FROM table_a JOIN table_b" + + # automatically add new referenced table + sqla_table.sql = "SELECT a, b, c FROM table_a JOIN table_b JOIN table_c" session.flush() new_dataset = session.query(Dataset).one() + assert len(new_dataset.tables) == 3 + assert new_dataset.tables[2].name == "table_c" - # check that dataset now points to the new table - assert new_dataset.tables[0].database_id == 2 - # point ``SqlaTable`` back - sqla_table.database_id = 1 +def test_quote_expressions(app_context: None, session: Session) -> None: + """ + Test that expressions are quoted appropriately in columns and datasets. 
+ """ + from superset.connectors.sqla.models import SqlaTable, TableColumn + from superset.datasets.models import Dataset + from superset.models.core import Database + + engine = session.get_bind() + Dataset.metadata.create_all(engine) # pylint: disable=no-member + + columns = [ + TableColumn(column_name="has space", type="INTEGER"), + TableColumn(column_name="no_need", type="INTEGER"), + ] + + sqla_table = SqlaTable( + table_name="old dataset", + columns=columns, + metrics=[], + database=Database(database_name="my_database", sqlalchemy_uri="sqlite://"), + ) + session.add(sqla_table) session.flush() - # check that dataset points to the original table - assert new_dataset.tables[0].database_id == 1 + dataset = session.query(Dataset).one() + assert dataset.expression == '"old dataset"' + assert dataset.columns[0].expression == '"has space"' + assert dataset.columns[1].expression == "no_need" diff --git a/tests/unit_tests/db_engine_specs/test_gsheets.py b/tests/unit_tests/db_engine_specs/test_gsheets.py index a13895e75e1d5..b050c6fdbf2ab 100644 --- a/tests/unit_tests/db_engine_specs/test_gsheets.py +++ b/tests/unit_tests/db_engine_specs/test_gsheets.py @@ -76,7 +76,11 @@ def test_validate_parameters_catalog( assert errors == [ SupersetError( - message="URL could not be identified", + message=( + "The URL could not be identified. Please check for typos " + "and make sure that ‘Type of Google Sheets allowed’ " + "selection matches the input." + ), error_type=SupersetErrorType.TABLE_DOES_NOT_EXIST_ERROR, level=ErrorLevel.WARNING, extra={ @@ -97,7 +101,11 @@ def test_validate_parameters_catalog( }, ), SupersetError( - message="URL could not be identified", + message=( + "The URL could not be identified. Please check for typos " + "and make sure that ‘Type of Google Sheets allowed’ " + "selection matches the input." + ), error_type=SupersetErrorType.TABLE_DOES_NOT_EXIST_ERROR, level=ErrorLevel.WARNING, extra={ @@ -158,7 +166,11 @@ def test_validate_parameters_catalog_and_credentials( errors = GSheetsEngineSpec.validate_parameters(parameters) # ignore: type assert errors == [ SupersetError( - message="URL could not be identified", + message=( + "The URL could not be identified. Please check for typos " + "and make sure that ‘Type of Google Sheets allowed’ " + "selection matches the input." + ), error_type=SupersetErrorType.TABLE_DOES_NOT_EXIST_ERROR, level=ErrorLevel.WARNING, extra={ diff --git a/tests/unit_tests/migrations/shared/utils_test.py b/tests/unit_tests/migrations/shared/utils_test.py deleted file mode 100644 index cb5b2cbd0e82b..0000000000000 --- a/tests/unit_tests/migrations/shared/utils_test.py +++ /dev/null @@ -1,56 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-# pylint: disable=import-outside-toplevel, unused-argument - -""" -Test the SIP-68 migration. -""" - -from pytest_mock import MockerFixture - -from superset.sql_parse import Table - - -def test_extract_table_references(mocker: MockerFixture, app_context: None) -> None: - """ - Test the ``extract_table_references`` helper function. - """ - from superset.migrations.shared.utils import extract_table_references - - assert extract_table_references("SELECT 1", "trino") == set() - assert extract_table_references("SELECT 1 FROM some_table", "trino") == { - Table(table="some_table", schema=None, catalog=None) - } - assert extract_table_references( - "SELECT 1 FROM some_catalog.some_schema.some_table", "trino" - ) == {Table(table="some_table", schema="some_schema", catalog="some_catalog")} - assert extract_table_references( - "SELECT * FROM some_table JOIN other_table ON some_table.id = other_table.id", - "trino", - ) == { - Table(table="some_table", schema=None, catalog=None), - Table(table="other_table", schema=None, catalog=None), - } - - # test falling back to sqlparse - logger = mocker.patch("superset.migrations.shared.utils.logger") - sql = "SELECT * FROM table UNION ALL SELECT * FROM other_table" - assert extract_table_references( - sql, - "trino", - ) == {Table(table="other_table", schema=None, catalog=None)} - logger.warning.assert_called_with("Unable to parse query with sqloxide: %s", sql) diff --git a/tests/unit_tests/pandas_postprocessing/test_rename.py b/tests/unit_tests/pandas_postprocessing/test_rename.py new file mode 100644 index 0000000000000..f49680a352618 --- /dev/null +++ b/tests/unit_tests/pandas_postprocessing/test_rename.py @@ -0,0 +1,175 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+import pandas as pd
+import pytest
+
+from superset.exceptions import InvalidPostProcessingError
+from superset.utils import pandas_postprocessing as pp
+from tests.unit_tests.fixtures.dataframes import categories_df
+
+
+def test_rename_should_not_side_effect():
+    _categories_df = categories_df.copy()
+    pp.rename(
+        df=_categories_df,
+        columns={
+            "constant": "constant_newname",
+            "category": "category_newname",
+        },
+    )
+    assert _categories_df.equals(categories_df)
+
+
+def test_rename():
+    new_categories_df = pp.rename(
+        df=categories_df,
+        columns={
+            "constant": "constant_newname",
+            "category": "category_newname",
+        },
+    )
+    assert list(new_categories_df.columns.values) == [
+        "constant_newname",
+        "category_newname",
+        "dept",
+        "name",
+        "asc_idx",
+        "desc_idx",
+        "idx_nulls",
+    ]
+    assert not new_categories_df.equals(categories_df)
+
+
+def test_should_inplace_rename():
+    _categories_df = categories_df.copy()
+    _categories_df_inplaced = pp.rename(
+        df=_categories_df,
+        columns={
+            "constant": "constant_newname",
+            "category": "category_newname",
+        },
+        inplace=True,
+    )
+    assert _categories_df_inplaced.equals(_categories_df)
+
+
+def test_should_rename_on_level():
+    iterables = [["m1", "m2"], ["a", "b"], ["x", "y"]]
+    columns = pd.MultiIndex.from_product(iterables, names=[None, "level1", "level2"])
+    df = pd.DataFrame(index=[0, 1, 2], columns=columns, data=1)
+    """
+            m1              m2
+    level1  a       b       a       b
+    level2  x   y   x   y   x   y   x   y
+    0       1   1   1   1   1   1   1   1
+    1       1   1   1   1   1   1   1   1
+    2       1   1   1   1   1   1   1   1
+    """
+    post_df = pp.rename(
+        df=df,
+        columns={"m1": "new_m1"},
+        level=0,
+    )
+    assert post_df.columns.get_level_values(level=0).equals(
+        pd.Index(
+            [
+                "new_m1",
+                "new_m1",
+                "new_m1",
+                "new_m1",
+                "m2",
+                "m2",
+                "m2",
+                "m2",
+            ]
+        )
+    )
+
+
+def test_should_raise_exception_no_column():
+    with pytest.raises(InvalidPostProcessingError):
+        pp.rename(
+            df=categories_df,
+            columns={
+                "foobar": "foobar2",
+            },
+        )
+
+
+def test_should_raise_exception_duplication():
+    with pytest.raises(InvalidPostProcessingError):
+        pp.rename(
+            df=categories_df,
+            columns={
+                "constant": "category",
+            },
+        )
+
+
+def test_should_raise_exception_duplication_on_multiindex():
+    iterables = [["m1", "m2"], ["a", "b"], ["x", "y"]]
+    columns = pd.MultiIndex.from_product(iterables, names=[None, "level1", "level2"])
+    df = pd.DataFrame(index=[0, 1, 2], columns=columns, data=1)
+    """
+            m1              m2
+    level1  a       b       a       b
+    level2  x   y   x   y   x   y   x   y
+    0       1   1   1   1   1   1   1   1
+    1       1   1   1   1   1   1   1   1
+    2       1   1   1   1   1   1   1   1
+    """
+
+    # each call needs its own ``pytest.raises`` block; otherwise the second
+    # call is dead code, since the first one raises before it runs
+    with pytest.raises(InvalidPostProcessingError):
+        pp.rename(
+            df=df,
+            columns={
+                "m1": "m2",
+            },
+            level=0,
+        )
+    with pytest.raises(InvalidPostProcessingError):
+        pp.rename(
+            df=df,
+            columns={
+                "a": "b",
+            },
+            level=1,
+        )
+
+
+def test_should_raise_exception_invalid_level():
+    with pytest.raises(InvalidPostProcessingError):
+        pp.rename(
+            df=categories_df,
+            columns={
+                "constant": "new_constant",
+            },
+            level=100,
+        )
+    with pytest.raises(InvalidPostProcessingError):
+        pp.rename(
+            df=categories_df,
+            columns={
+                "constant": "new_constant",
+            },
+            level="xxxxx",
+        )
+
+
+def test_should_return_df_empty_columns():
+    assert pp.rename(
+        df=categories_df,
+        columns={},
+    ).equals(categories_df)
diff --git a/tests/unit_tests/sql_parse_tests.py b/tests/unit_tests/sql_parse_tests.py
index 4a1ff89d74cc6..d9c5d64c5950c 100644
--- a/tests/unit_tests/sql_parse_tests.py
+++ b/tests/unit_tests/sql_parse_tests.py
@@ -29,6 +29,7 @@
 from superset.exceptions import QueryClauseValidationException
 from superset.sql_parse import (
     add_table_name,
+    extract_table_references,
     get_rls_for_table,
     has_table_query,
     insert_rls,
@@ -1468,3
+1469,51 @@ def test_get_rls_for_table(mocker: MockerFixture, app_context: None) -> None:
     dataset.get_sqla_row_level_filters.return_value = []
     assert get_rls_for_table(candidate, 1, "public") is None
+
+
+def test_extract_table_references(mocker: MockerFixture) -> None:
+    """
+    Test the ``extract_table_references`` helper function.
+    """
+    assert extract_table_references("SELECT 1", "trino") == set()
+    assert extract_table_references("SELECT 1 FROM some_table", "trino") == {
+        Table(table="some_table", schema=None, catalog=None)
+    }
+    assert extract_table_references("SELECT {{ jinja }} FROM some_table", "trino") == {
+        Table(table="some_table", schema=None, catalog=None)
+    }
+    assert extract_table_references(
+        "SELECT 1 FROM some_catalog.some_schema.some_table", "trino"
+    ) == {Table(table="some_table", schema="some_schema", catalog="some_catalog")}
+
+    # with identifier quotes
+    assert extract_table_references(
+        "SELECT 1 FROM `some_catalog`.`some_schema`.`some_table`", "mysql"
+    ) == {Table(table="some_table", schema="some_schema", catalog="some_catalog")}
+    assert extract_table_references(
+        'SELECT 1 FROM "some_catalog".some_schema."some_table"', "trino"
+    ) == {Table(table="some_table", schema="some_schema", catalog="some_catalog")}
+
+    assert extract_table_references(
+        "SELECT * FROM some_table JOIN other_table ON some_table.id = other_table.id",
+        "trino",
+    ) == {
+        Table(table="some_table", schema=None, catalog=None),
+        Table(table="other_table", schema=None, catalog=None),
+    }
+
+    # test falling back to sqlparse
+    logger = mocker.patch("superset.sql_parse.logger")
+    sql = "SELECT * FROM table UNION ALL SELECT * FROM other_table"
+    assert extract_table_references(
+        sql,
+        "trino",
+    ) == {Table(table="other_table", schema=None, catalog=None)}
+    logger.warning.assert_called_once()
+
+    # patch the logger in ``superset.sql_parse``, where the helper now lives;
+    # patching the old migrations module would make the assertion vacuous
+    logger = mocker.patch("superset.sql_parse.logger")
+    sql = "SELECT * FROM table UNION ALL SELECT * FROM other_table"
+    assert extract_table_references(sql, "trino", show_warning=False) == {
+        Table(table="other_table", schema=None, catalog=None)
+    }
+    logger.warning.assert_not_called()
diff --git a/tests/unit_tests/utils/db.py b/tests/unit_tests/utils/db.py
new file mode 100644
index 0000000000000..554c95bd43187
--- /dev/null
+++ b/tests/unit_tests/utils/db.py
@@ -0,0 +1,30 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from typing import Any
+
+from superset import security_manager
+
+
+def get_test_user(id_: int, username: str) -> Any:
+    """Create a sample test user"""
+    return security_manager.user_model(
+        id=id_,
+        username=username,
+        first_name=username,
+        last_name=username,
+        email=f"{username}@example.com",
+    )
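
The new ``get_test_user`` helper above only constructs a ``security_manager.user_model`` instance; it never adds anything to a session. A minimal sketch of how a test elsewhere in this suite might consume it follows; the test name is hypothetical, and it assumes the same ``app_context`` fixture used by the other unit tests so that ``security_manager`` is initialized before the model class is accessed.

    from tests.unit_tests.utils.db import get_test_user


    def test_get_test_user_mirrors_username(app_context: None) -> None:
        # hypothetical usage sketch: the helper mirrors ``username`` into the
        # name fields and derives the e-mail, exactly as set in db.py above
        user = get_test_user(1, "alice")
        assert user.id == 1
        assert user.username == "alice"
        assert user.first_name == "alice"
        assert user.last_name == "alice"
        assert user.email == "alice@example.com"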