diff --git a/CHANGELOG.md b/CHANGELOG.md index 090accb39689e..4b31d644052c7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -19,6 +19,7 @@ under the License. ## Change Log +- [2.1.1](#211-sun-apr-23-154421-2023-0100) - [2.1.0](#210-thu-mar-16-211305-2023--0700) - [2.0.1](#201-fri-nov-4-103402-2022--0400) - [2.0.0](#200-tue-jun-28-085302-2022--0400) @@ -29,6 +30,51 @@ under the License. - [1.4.2](#142-sat-mar-19-000806-2022-0200) - [1.4.1](#141) + +### 2.1.1 (Sun Apr 23 15:44:21 2023 +0100) + +**Database Migrations** +- [#23980](https://github.com/apache/superset/pull/23980) fix(migration): handle permalink edge cases correctly (@villebro) +- [#23888](https://github.com/apache/superset/pull/23888) chore(key-value): use json serialization for main resources (@villebro) + +**Fixes** +- [#23723](https://github.com/apache/superset/pull/23723) fix: add enforce URI query params with a specific for MySQL (@dpgaspar) +- [#24970](https://github.com/apache/superset/pull/24970) fix: update permalink schema (@eschutho) +- [#24995](https://github.com/apache/superset/pull/24995) fix: Revert "fix(chart): Time Series set showMaxLabel as null for time xAxis (#20627) (@eschutho) +- [#24513](https://github.com/apache/superset/pull/24513) fix(sqllab): normalize changedOn timestamp (@villebro) +- [#23512](https://github.com/apache/superset/pull/23512) fix: Dashboard not loading with default first value in filter (@geido) +- [#24482](https://github.com/apache/superset/pull/24482) fix(permalink): Incorrect component schema reference (@Nisden) +- [#24166](https://github.com/apache/superset/pull/24166) fix(permalink): migrate to marshmallow codec (@villebro) +- [#24697](https://github.com/apache/superset/pull/24697) fix: import database engine validation (@dpgaspar) +- [#24390](https://github.com/apache/superset/pull/24390) fix: FAB CSS on Superset (@dpgaspar) +- [#24249](https://github.com/apache/superset/pull/24249) fix: dashboard ownership check (@betodealmeida) +- 
[#23801](https://github.com/apache/superset/pull/23801) fix(plugin-chart-handlebars): Fix TypeError when using handlebars columns raw mode (@fmannhardt) +- [#23566](https://github.com/apache/superset/pull/23566) fix: Filter values are not updating when dependencies are set (@michael-s-molina) +- [#23400](https://github.com/apache/superset/pull/23400) fix: Select all issue with "Dynamically search all filter values" in FilterBar (@geido) +- [#23865](https://github.com/apache/superset/pull/23865) fix: Native time range filter in legacy charts (@kgabryje) +- [#24054](https://github.com/apache/superset/pull/24054) fix: handle temporal columns in presto partitions (@giftig) +- [#23882](https://github.com/apache/superset/pull/23882) fix: handle comments in `has_table_query` (@betodealmeida) +- [#24137](https://github.com/apache/superset/pull/24137) fix: disable SHOW_STACKTRACE by default (@dpgaspar) +- [#24185](https://github.com/apache/superset/pull/24185) fix: db validate parameters permission (@dpgaspar) +- [#23769](https://github.com/apache/superset/pull/23769) fix: allow db driver distinction on enforced URI params (@dpgaspar) +- [#23600](https://github.com/apache/superset/pull/23600) fix: load examples as anon user (@betodealmeida) +- [#23200](https://github.com/apache/superset/pull/23200) fix: permission checks on import (@betodealmeida) +- [#23901](https://github.com/apache/superset/pull/23901) fix: check sqlalchemy_uri (@dpgaspar) +- [#23751](https://github.com/apache/superset/pull/23751) fix(mssql): apply top after distinct (@villebro) +- [#23586](https://github.com/apache/superset/pull/23586) fix(dashboard-rbac): use normal rbac when no roles chosen (@villebro) +- [#23582](https://github.com/apache/superset/pull/23582) fix(dash import): Ensure old datasource ids are not referenced in imported charts (@jfrag1) +- [#23506](https://github.com/apache/superset/pull/23506) fix(generic-x-axis): skip initial time filter for legacy charts (@villebro) +- 
[#23507](https://github.com/apache/superset/pull/23507) fix(legacy-plugin-chart-heatmap): fix adhoc column tooltip (@villebro) +- [#23441](https://github.com/apache/superset/pull/23441) fix(chart): non existent time grain no longer breaks the application (@rdubois) +- [#23393](https://github.com/apache/superset/pull/23393) fix(Pivot Table v2): resolved full width issue (@AkashBoora) +- [#22851](https://github.com/apache/superset/pull/22851) fix: Validate jinja rendered query (@geido) + +**Others** +- [#24586](https://github.com/apache/superset/pull/24586) chore(metastore-cache): add codec support (@villebro) +- [#23113](https://github.com/apache/superset/pull/23113) chore(sqla): Address performance tradeoff with eager loading (@john-bodley) +- [#24294](https://github.com/apache/superset/pull/24294) chore: update UPDATING for 2.1.0 (@eschutho) +- [#24056](https://github.com/apache/superset/pull/24056) chore: Remove unnecessary information from response (@geido) + ### 2.1.0 (Thu Mar 16 21:13:05 2023 -0700) **Database Migrations** diff --git a/UPDATING.md b/UPDATING.md index 90861325bdaf5..0f0e4fafa7d51 100644 --- a/UPDATING.md +++ b/UPDATING.md @@ -32,7 +32,6 @@ assists people when migrating to a new version. - [24628](https://github.com/apache/superset/pull/24628): Augments the foreign key constraints for the `dashboard_owner`, `report_schedule_owner`, and `slice_owner` tables to include an explicit CASCADE ON DELETE to ensure the relevant ownership records are deleted when a dataset is deleted. Scheduled downtime may be advised. - [24488](https://github.com/apache/superset/pull/24488): Augments the foreign key constraints for the `sql_metrics`, `sqlatable_user`, and `table_columns` tables to include an explicit CASCADE ON DELETE to ensure the relevant records are deleted when a dataset is deleted. Scheduled downtime may be advised. 
- [24335](https://github.com/apache/superset/pull/24335): Removed deprecated API `/superset/filter////` -- [24185](https://github.com/apache/superset/pull/24185): `/api/v1/database/test_connection` and `api/v1/database/validate_parameters` permissions changed from `can_read` to `can_write`. Only Admin user's have access. - [24232](https://github.com/apache/superset/pull/24232): Enables ENABLE_TEMPLATE_REMOVE_FILTERS, DRILL_TO_DETAIL, DASHBOARD_CROSS_FILTERS by default, marks VERSIONED_EXPORT and ENABLE_TEMPLATE_REMOVE_FILTERS as deprecated. - [23652](https://github.com/apache/superset/pull/23652): Enables GENERIC_CHART_AXES feature flag by default. - [23226](https://github.com/apache/superset/pull/23226): Migrated endpoint `/estimate_query_cost/` to `/api/v1/sqllab/estimate/`. Corresponding permissions are can estimate query cost on SQLLab. Make sure you add/replace the necessary permissions on any custom roles you may have. @@ -84,6 +83,12 @@ assists people when migrating to a new version. - [24982](https://github.com/apache/superset/pull/24982): By default, physical datasets on Oracle-like dialects like Snowflake will now use denormalized column names. However, existing datasets won't be affected. To change this behavior, the "Advanced" section on the dataset modal has a "Normalize column names" flag which can be changed to change this behavior. +## 2.1.1 +- [24185](https://github.com/apache/superset/pull/24185): `/api/v1/database/test_connection` and `api/v1/database/validate_parameters` permissions changed from `can_read` to `can_write`. Only Admin user's have access. 
+ +### Other +- [23888](https://github.com/apache/superset/pull/23888): Database Migration for json serialization instead of pickle should upgrade/downgrade correctly when bumping to/from this patch version + ## 2.1.0 - [22809](https://github.com/apache/superset/pull/22809): Migrated endpoint `/superset/sql_json` and `/superset/results/` to `/api/v1/sqllab/execute/` and `/api/v1/sqllab/results/` respectively. Corresponding permissions are `can sql_json on Superset` to `can execute on SQLLab`, `can results on Superset` to `can results on SQLLab`. Make sure you add/replace the necessary permissions on any custom roles you may have. diff --git a/docs/docs/databases/db-connection-ui.mdx b/docs/docs/databases/db-connection-ui.mdx index 833b865c8de5b..67303c802a6c8 100644 --- a/docs/docs/databases/db-connection-ui.mdx +++ b/docs/docs/databases/db-connection-ui.mdx @@ -28,7 +28,7 @@ We added a new configuration option where the admin can define their preferred d # displayed prominently in the "Add Database" dialog. You should # use the "engine_name" attribute of the corresponding DB engine spec # in `superset/db_engine_specs/`. -PREFERRED_DATABASES: List[str] = [ +PREFERRED_DATABASES: list[str] = [ "PostgreSQL", "Presto", "MySQL", diff --git a/docs/docs/frequently-asked-questions.mdx b/docs/docs/frequently-asked-questions.mdx index bbb94d617b986..909ba8ebfd4da 100644 --- a/docs/docs/frequently-asked-questions.mdx +++ b/docs/docs/frequently-asked-questions.mdx @@ -6,35 +6,52 @@ sidebar_position: 7 ## Frequently Asked Questions + +### How big of a dataset can Superset handle? + +Superset can work with even gigantic databases! Superset acts as a thin layer above your underlying +databases or data engines, which do all the processing. Superset simply visualizes the results of +the query. + +The key to achieving acceptable performance in Superset is whether your database can execute queries +and return results at a speed that is acceptable to your users. 
If you experience slow performance with +Superset, benchmark and tune your data warehouse. + +### What are the computing specifications required to run Superset? + +The specs of your Superset installation depend on how many users you have and what their activity is, not +on the size of your data. Superset admins in the community have reported 8GB RAM, 2vCPUs as adequate to +run a moderately-sized instance. To develop Superset, e.g., compile code or build images, you may +need more power. + +Monitor your resource usage and increase or decrease as needed. Note that Superset usage has a tendency +to occur in spikes, e.g., if everyone in a meeting loads the same dashboard at once. + +Superset's application metadata does not require a very large database to store it, though +the log file grows over time. + + ### Can I join / query multiple tables at one time? Not in the Explore or Visualization UI. A Superset SQLAlchemy datasource can only be a single table or a view. -When working with tables, the solution would be to materialize a table that contains all the fields +When working with tables, the solution would be to create a table that contains all the fields needed for your analysis, most likely through some scheduled batch process. -A view is a simple logical layer that abstract an arbitrary SQL queries as a virtual table. This can -allow you to join and union multiple tables, and to apply some transformation using arbitrary SQL -expressions. The limitation there is your database performance as Superset effectively will run a +A view is a simple logical layer that abstracts an arbitrary SQL queries as a virtual table. This can +allow you to join and union multiple tables and to apply some transformation using arbitrary SQL +expressions. The limitation there is your database performance, as Superset effectively will run a query on top of your query (view). 
A good practice may be to limit yourself to joining your main large table to one or many small tables only, and avoid using _GROUP BY_ where possible as Superset will do its own _GROUP BY_ and doing the work twice might slow down performance. -Whether you use a table or a view, the important factor is whether your database is fast enough to -serve it in an interactive fashion to provide a good user experience in Superset. +Whether you use a table or a view, performance depends on how fast your database can deliver +the result to users interacting with Superset. However, if you are using SQL Lab, there is no such limitation. You can write SQL queries to join multiple tables as long as your database account has access to the tables. -### How BIG can my datasource be? - -It can be gigantic! Superset acts as a thin layer above your underlying databases or data engines. - -As mentioned above, the main criteria is whether your database can execute queries and return -results in a time frame that is acceptable to your users. Many distributed databases out there can -execute queries that scan through terabytes in an interactive fashion. - ### How do I create my own visualization? We recommend reading the instructions in @@ -192,8 +209,9 @@ only a few database engines are supported for use as the OLTP backend / metadata Superset is tested using MySQL, PostgreSQL, and SQLite backends. It’s recommended you install Superset on one of these database servers for production. Installation on other OLTP databases -may work but isn’t tested. Column-store, non-OLTP databases are not designed for this type of workload. - +may work but isn’t tested. It has been reported that [Microsoft SQL Server does *not* +work as a Superset backend](https://github.com/apache/superset/issues/18961). Column-store, +non-OLTP databases are not designed for this type of workload. ### How can I configure OAuth authentication and authorization? 
diff --git a/docs/docs/security/cves.mdx b/docs/docs/security/cves.mdx index 148af09c54c98..95776505372a2 100644 --- a/docs/docs/security/cves.mdx +++ b/docs/docs/security/cves.mdx @@ -4,20 +4,34 @@ hide_title: true sidebar_position: 2 --- +#### Version 2.1.1 + +| CVE | Title | Affected | +|:---------------|:------------------------------------------------------------------------|---------:| +| CVE-2023-36387 | Improper API permission for low privilege users | < 2.1.1 | +| CVE-2023-36388 | Improper API permission for low privilege users allows for SSRF | < 2.1.1 | +| CVE-2023-27523 | Improper data permission validation on Jinja templated queries | < 2.1.1 | +| CVE-2023-27526 | Improper Authorization check on import charts | < 2.1.1 | +| CVE-2023-39264 | Stack traces enabled by default | < 2.1.1 | +| CVE-2023-39265 | Possible Unauthorized Registration of SQLite Database Connections | < 2.1.1 | +| CVE-2023-37941 | Metadata db write access can lead to remote code execution | < 2.1.1 | +| CVE-2023-32672 | SQL parser edge case bypasses data access authorization | < 2.1.1 | + + #### Version 2.1.0 -| CVE | Title | Affected | -| :------------- | :---------------------------------------------------------------------- | -----------------:| -| CVE-2023-25504 | Possible SSRF on import datasets | <= 2.1.0 | -| CVE-2023-27524 | Session validation vulnerability when using provided default SECRET_KEY | <= 2.1.0 | -| CVE-2023-27525 | Incorrect default permissions for Gamma role | <= 2.1.0 | -| CVE-2023-30776 | Database connection password leak | <= 2.1.0 | +| CVE | Title | Affected | +|:---------------|:------------------------------------------------------------------------|---------:| +| CVE-2023-25504 | Possible SSRF on import datasets | < 2.1.0 | +| CVE-2023-27524 | Session validation vulnerability when using provided default SECRET_KEY | < 2.1.0 | +| CVE-2023-27525 | Incorrect default permissions for Gamma role | < 2.1.0 | +| CVE-2023-30776 | Database connection password leak 
| < 2.1.0 | #### Version 2.0.1 -| CVE | Title | Affected | -| :------------- | :---------------------------------------------------------- | -----------------:| +| CVE | Title | Affected | +|:---------------|:------------------------------------------------------------|------------------:| | CVE-2022-41703 | SQL injection vulnerability in adhoc clauses | < 2.0.1 or <1.5.2 | | CVE-2022-43717 | Cross-Site Scripting on dashboards | < 2.0.1 or <1.5.2 | | CVE-2022-43718 | Cross-Site Scripting vulnerability on upload forms | < 2.0.1 or <1.5.2 | diff --git a/superset-frontend/cypress-base/cypress/e2e/explore/chart.test.js b/superset-frontend/cypress-base/cypress/e2e/explore/chart.test.js index 917440accddc5..d198672ef3edb 100644 --- a/superset-frontend/cypress-base/cypress/e2e/explore/chart.test.js +++ b/superset-frontend/cypress-base/cypress/e2e/explore/chart.test.js @@ -32,13 +32,13 @@ function openDashboardsAddedTo() { cy.getBySel('actions-trigger').click(); cy.get('.ant-dropdown-menu-submenu-title') .contains('Dashboards added to') - .trigger('mouseover'); + .trigger('mouseover', { force: true }); } function closeDashboardsAddedTo() { cy.get('.ant-dropdown-menu-submenu-title') .contains('Dashboards added to') - .trigger('mouseout'); + .trigger('mouseout', { force: true }); cy.getBySel('actions-trigger').click(); } diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/CategoricalDeckGLContainer.tsx b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/CategoricalDeckGLContainer.tsx index 4348bf1561c97..fb150445f4e93 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/CategoricalDeckGLContainer.tsx +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/CategoricalDeckGLContainer.tsx @@ -24,7 +24,7 @@ */ /* eslint no-underscore-dangle: ["error", { "allow": ["", "__timestamp"] }] */ -import React from 'react'; +import React, { memo, useCallback, useEffect, useRef, useState } from 'react'; import { CategoricalColorNamespace, 
Datasource, @@ -40,7 +40,7 @@ import sandboxedEval from './utils/sandbox'; // eslint-disable-next-line import/extensions import fitViewport, { Viewport } from './utils/fitViewport'; import { - DeckGLContainer, + DeckGLContainerHandle, DeckGLContainerStyledWrapper, } from './DeckGLContainer'; import { Point } from './types'; @@ -83,82 +83,72 @@ export type CategoricalDeckGLContainerProps = { setControlValue: (control: string, value: JsonValue) => void; }; -export type CategoricalDeckGLContainerState = { - formData?: QueryFormData; - viewport: Viewport; - categories: JsonObject; -}; - -export default class CategoricalDeckGLContainer extends React.PureComponent< - CategoricalDeckGLContainerProps, - CategoricalDeckGLContainerState -> { - containerRef = React.createRef(); - - /* - * A Deck.gl container that handles categories. - * - * The container will have an interactive legend, populated from the - * categories present in the data. - */ - constructor(props: CategoricalDeckGLContainerProps) { - super(props); - this.state = this.getStateFromProps(props); - - this.getLayers = this.getLayers.bind(this); - this.toggleCategory = this.toggleCategory.bind(this); - this.showSingleCategory = this.showSingleCategory.bind(this); - } - - UNSAFE_componentWillReceiveProps(nextProps: CategoricalDeckGLContainerProps) { - if (nextProps.payload.form_data !== this.state.formData) { - this.setState({ ...this.getStateFromProps(nextProps) }); - } - } +const CategoricalDeckGLContainer = (props: CategoricalDeckGLContainerProps) => { + const containerRef = useRef(null); - // eslint-disable-next-line class-methods-use-this - getStateFromProps( - props: CategoricalDeckGLContainerProps, - state?: CategoricalDeckGLContainerState, - ) { - const features = props.payload.data.features || []; - const categories = getCategories(props.formData, features); - - // the state is computed only from the payload; if it hasn't changed, do - // not recompute state since this would reset selections and/or the 
play - // slider position due to changes in form controls - if (state && props.payload.form_data === state.formData) { - return { ...state, categories }; - } - - const { width, height, formData } = props; - let { viewport } = props; - if (formData.autozoom) { + const getAdjustedViewport = useCallback(() => { + let viewport = { ...props.viewport }; + if (props.formData.autozoom) { viewport = fitViewport(viewport, { - width, - height, - points: props.getPoints(features), + width: props.width, + height: props.height, + points: props.getPoints(props.payload.data.features || []), }); } if (viewport.zoom < 0) { viewport.zoom = 0; } + return viewport; + }, [props]); + + const [categories, setCategories] = useState( + getCategories(props.formData, props.payload.data.features || []), + ); + const [stateFormData, setStateFormData] = useState( + props.payload.form_data, + ); + const [viewport, setViewport] = useState(getAdjustedViewport()); + + useEffect(() => { + if (props.payload.form_data !== stateFormData) { + const features = props.payload.data.features || []; + const categories = getCategories(props.formData, features); + + setViewport(getAdjustedViewport()); + setStateFormData(props.payload.form_data); + setCategories(categories); + } + }, [getAdjustedViewport, props, stateFormData]); - return { - viewport, - selected: [], - lastClick: 0, - formData: props.payload.form_data, - categories, - }; - } + const setTooltip = useCallback((tooltip: TooltipProps['tooltip']) => { + const { current } = containerRef; + if (current) { + current.setTooltip(tooltip); + } + }, []); + + const addColor = useCallback((data: JsonObject[], fd: QueryFormData) => { + const c = fd.color_picker || { r: 0, g: 0, b: 0, a: 1 }; + const colorFn = getScale(fd.color_scheme); + + return data.map(d => { + let color; + if (fd.dimension) { + color = hexToRGB(colorFn(d.cat_color, fd.sliceId), c.a * 255); + + return { ...d, color }; + } + + return d; + }); + }, []); - getLayers() { - const { getLayer, 
payload, formData: fd, onAddFilter } = this.props; + const getLayers = useCallback(() => { + const { getLayer, payload, formData: fd, onAddFilter } = props; let features = payload.data.features ? [...payload.data.features] : []; // Add colors from categories or fixed color - features = this.addColor(features, fd); + features = addColor(features, fd); // Apply user defined data mutator if defined if (fd.js_data_mutator) { @@ -167,9 +157,8 @@ export default class CategoricalDeckGLContainer extends React.PureComponent< } // Show only categories selected in the legend - const cats = this.state.categories; if (fd.dimension) { - features = features.filter(d => cats[d.cat_color]?.enabled); + features = features.filter(d => categories[d.cat_color]?.enabled); } const filteredPayload = { @@ -182,88 +171,69 @@ export default class CategoricalDeckGLContainer extends React.PureComponent< fd, filteredPayload, onAddFilter, - this.setTooltip, - this.props.datasource, + setTooltip, + props.datasource, ) as Layer, ]; - } - - // eslint-disable-next-line class-methods-use-this - addColor(data: JsonObject[], fd: QueryFormData) { - const c = fd.color_picker || { r: 0, g: 0, b: 0, a: 1 }; - const colorFn = getScale(fd.color_scheme); - - return data.map(d => { - let color; - if (fd.dimension) { - color = hexToRGB(colorFn(d.cat_color, fd.sliceId), c.a * 255); - - return { ...d, color }; + }, [addColor, categories, props, setTooltip]); + + const toggleCategory = useCallback( + (category: string) => { + const categoryState = categories[category]; + const categoriesExtended = { + ...categories, + [category]: { + ...categoryState, + enabled: !categoryState.enabled, + }, + }; + + // if all categories are disabled, enable all -- similar to nvd3 + if (Object.values(categoriesExtended).every(v => !v.enabled)) { + /* eslint-disable no-param-reassign */ + Object.values(categoriesExtended).forEach(v => { + v.enabled = true; + }); } - - return d; - }); - } - - toggleCategory(category: string) { - 
const categoryState = this.state.categories[category]; - const categories = { - ...this.state.categories, - [category]: { - ...categoryState, - enabled: !categoryState.enabled, - }, - }; - - // if all categories are disabled, enable all -- similar to nvd3 - if (Object.values(categories).every(v => !v.enabled)) { - /* eslint-disable no-param-reassign */ - Object.values(categories).forEach(v => { - v.enabled = true; + setCategories(categoriesExtended); + }, + [categories], + ); + + const showSingleCategory = useCallback( + (category: string) => { + const modifiedCategories = { ...categories }; + Object.values(modifiedCategories).forEach(v => { + v.enabled = false; }); - } - this.setState({ categories }); - } - - showSingleCategory(category: string) { - const categories = { ...this.state.categories }; - /* eslint-disable no-param-reassign */ - Object.values(categories).forEach(v => { - v.enabled = false; - }); - categories[category].enabled = true; - this.setState({ categories }); - } - - setTooltip = (tooltip: TooltipProps['tooltip']) => { - const { current } = this.containerRef; - if (current) { - current.setTooltip(tooltip); - } - }; + modifiedCategories[category].enabled = true; + setCategories(modifiedCategories); + }, + [categories], + ); + + return ( +
+ + +
+ ); +}; - render() { - return ( -
- - -
- ); - } -} +export default memo(CategoricalDeckGLContainer); diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/DeckGLContainer.tsx b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/DeckGLContainer.tsx index 29672febfb154..7b8f61e18b581 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/DeckGLContainer.tsx +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/DeckGLContainer.tsx @@ -20,11 +20,19 @@ * specific language governing permissions and limitations * under the License. */ -import React, { ReactNode } from 'react'; +import React, { + forwardRef, + memo, + ReactNode, + useCallback, + useEffect, + useImperativeHandle, + useState, +} from 'react'; import { isEqual } from 'lodash'; import { StaticMap } from 'react-map-gl'; import DeckGL, { Layer } from 'deck.gl/typed'; -import { JsonObject, JsonValue, styled } from '@superset-ui/core'; +import { JsonObject, JsonValue, styled, usePrevious } from '@superset-ui/core'; import Tooltip, { TooltipProps } from './components/Tooltip'; import 'mapbox-gl/dist/mapbox-gl.css'; import { Viewport } from './utils/fitViewport'; @@ -43,76 +51,57 @@ export type DeckGLContainerProps = { onViewportChange?: (viewport: Viewport) => void; }; -export type DeckGLContainerState = { - lastUpdate: number | null; - viewState: Viewport; - tooltip: TooltipProps['tooltip']; - timer: ReturnType; -}; - -export class DeckGLContainer extends React.Component< - DeckGLContainerProps, - DeckGLContainerState -> { - constructor(props: DeckGLContainerProps) { - super(props); - this.tick = this.tick.bind(this); - this.onViewStateChange = this.onViewStateChange.bind(this); - // This has to be placed after this.tick is bound to this - this.state = { - timer: setInterval(this.tick, TICK), - tooltip: null, - viewState: props.viewport, - lastUpdate: null, - }; - } +export const DeckGLContainer = memo( + forwardRef((props: DeckGLContainerProps, ref) => { + const [tooltip, setTooltip] = useState(null); + 
const [lastUpdate, setLastUpdate] = useState(null); + const [viewState, setViewState] = useState(props.viewport); + const prevViewport = usePrevious(props.viewport); - UNSAFE_componentWillReceiveProps(nextProps: DeckGLContainerProps) { - if (!isEqual(nextProps.viewport, this.props.viewport)) { - this.setState({ viewState: nextProps.viewport }); - } - } + useImperativeHandle(ref, () => ({ setTooltip }), []); - componentWillUnmount() { - clearInterval(this.state.timer); - } + const tick = useCallback(() => { + // Rate limiting updating viewport controls as it triggers lots of renders + if (lastUpdate && Date.now() - lastUpdate > TICK) { + const setCV = props.setControlValue; + if (setCV) { + setCV('viewport', viewState); + } + setLastUpdate(null); + } + }, [lastUpdate, props.setControlValue, viewState]); - onViewStateChange({ viewState }: { viewState: JsonObject }) { - this.setState({ viewState: viewState as Viewport, lastUpdate: Date.now() }); - } + useEffect(() => { + const timer = setInterval(tick, TICK); + return clearInterval(timer); + }, [tick]); - tick() { - // Rate limiting updating viewport controls as it triggers lotsa renders - const { lastUpdate } = this.state; - if (lastUpdate && Date.now() - lastUpdate > TICK) { - const setCV = this.props.setControlValue; - if (setCV) { - setCV('viewport', this.state.viewState); + useEffect(() => { + if (!isEqual(props.viewport, prevViewport)) { + setViewState(props.viewport); } - this.setState({ lastUpdate: null }); - } - } + }, [prevViewport, props.viewport]); - layers() { - // Support for layer factory - if (this.props.layers.some(l => typeof l === 'function')) { - return this.props.layers.map(l => - typeof l === 'function' ? 
l() : l, - ) as Layer[]; - } - - return this.props.layers as Layer[]; - } + const onViewStateChange = useCallback( + ({ viewState }: { viewState: JsonObject }) => { + setViewState(viewState as Viewport); + setLastUpdate(Date.now()); + }, + [], + ); - setTooltip = (tooltip: TooltipProps['tooltip']) => { - this.setState({ tooltip }); - }; + const layers = useCallback(() => { + // Support for layer factory + if (props.layers.some(l => typeof l === 'function')) { + return props.layers.map(l => + typeof l === 'function' ? l() : l, + ) as Layer[]; + } - render() { - const { children = null, height, width } = this.props; - const { viewState, tooltip } = this.state; + return props.layers as Layer[]; + }, [props.layers]); - const layers = this.layers(); + const { children = null, height, width } = props; return ( <> @@ -121,15 +110,15 @@ export class DeckGLContainer extends React.Component< controller width={width} height={height} - layers={layers} + layers={layers()} viewState={viewState} glOptions={{ preserveDrawingBuffer: true }} - onViewStateChange={this.onViewStateChange} + onViewStateChange={onViewStateChange} > {children} @@ -137,8 +126,8 @@ export class DeckGLContainer extends React.Component< ); - } -} + }), +); export const DeckGLContainerStyledWrapper = styled(DeckGLContainer)` .deckgl-tooltip > div { @@ -146,3 +135,7 @@ export const DeckGLContainerStyledWrapper = styled(DeckGLContainer)` text-overflow: ellipsis; } `; + +export type DeckGLContainerHandle = typeof DeckGLContainer & { + setTooltip: (tooltip: ReactNode) => void; +}; diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/Multi/Multi.tsx b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/Multi/Multi.tsx index 5cfa02f704561..540b094219cf4 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/Multi/Multi.tsx +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/Multi/Multi.tsx @@ -19,7 +19,7 @@ * specific language governing permissions and limitations * 
under the License. */ -import React from 'react'; +import React, { memo, useCallback, useEffect, useRef, useState } from 'react'; import { isEqual } from 'lodash'; import { Datasource, @@ -28,11 +28,12 @@ import { JsonValue, QueryFormData, SupersetClient, + usePrevious, } from '@superset-ui/core'; import { Layer } from 'deck.gl/typed'; import { - DeckGLContainer, + DeckGLContainerHandle, DeckGLContainerStyledWrapper, } from '../DeckGLContainer'; import { getExploreLongUrl } from '../utils/explore'; @@ -52,120 +53,97 @@ export type DeckMultiProps = { onSelect: () => void; }; -export type DeckMultiState = { - subSlicesLayers: Record; - viewport?: Viewport; -}; - -class DeckMulti extends React.PureComponent { - containerRef = React.createRef(); - - constructor(props: DeckMultiProps) { - super(props); - this.state = { subSlicesLayers: {} }; - this.onViewportChange = this.onViewportChange.bind(this); - } - - componentDidMount() { - const { formData, payload } = this.props; - this.loadLayers(formData, payload); - } - - UNSAFE_componentWillReceiveProps(nextProps: DeckMultiProps) { - const { formData, payload } = nextProps; - const hasChanges = !isEqual( - this.props.formData.deck_slices, - nextProps.formData.deck_slices, - ); - if (hasChanges) { - this.loadLayers(formData, payload); - } - } - - onViewportChange(viewport: Viewport) { - this.setState({ viewport }); - } - - loadLayers( - formData: QueryFormData, - payload: JsonObject, - viewport?: Viewport, - ) { - this.setState({ subSlicesLayers: {}, viewport }); - payload.data.slices.forEach( - (subslice: { slice_id: number } & JsonObject) => { - // Filters applied to multi_deck are passed down to underlying charts - // note that dashboard contextual information (filter_immune_slices and such) aren't - // taken into consideration here - const filters = [ - ...(subslice.form_data.filters || []), - ...(formData.filters || []), - ...(formData.extra_filters || []), - ]; - const subsliceCopy = { - ...subslice, - form_data: { - 
...subslice.form_data, - filters, - }, - }; - - const url = getExploreLongUrl(subsliceCopy.form_data, 'json'); +const DeckMulti = (props: DeckMultiProps) => { + const containerRef = useRef(); - if (url) { - SupersetClient.get({ - endpoint: url, - }) - .then(({ json }) => { - const layer = layerGenerators[subsliceCopy.form_data.viz_type]( - subsliceCopy.form_data, - json, - this.props.onAddFilter, - this.setTooltip, - this.props.datasource, - [], - this.props.onSelect, - ); - this.setState({ - subSlicesLayers: { - ...this.state.subSlicesLayers, - [subsliceCopy.slice_id]: layer, - }, - }); - }) - .catch(() => {}); - } - }, - ); - } + const [viewport, setViewport] = useState(); + const [subSlicesLayers, setSubSlicesLayers] = useState>( + {}, + ); - setTooltip = (tooltip: TooltipProps['tooltip']) => { - const { current } = this.containerRef; + const setTooltip = useCallback((tooltip: TooltipProps['tooltip']) => { + const { current } = containerRef; if (current) { current.setTooltip(tooltip); } - }; - - render() { - const { payload, formData, setControlValue, height, width } = this.props; - const { subSlicesLayers } = this.state; - - const layers = Object.values(subSlicesLayers); - - return ( - - ); - } -} + }, []); + + const loadLayers = useCallback( + (formData: QueryFormData, payload: JsonObject, viewport?: Viewport) => { + setViewport(viewport); + setSubSlicesLayers({}); + payload.data.slices.forEach( + (subslice: { slice_id: number } & JsonObject) => { + // Filters applied to multi_deck are passed down to underlying charts + // note that dashboard contextual information (filter_immune_slices and such) aren't + // taken into consideration here + const filters = [ + ...(subslice.form_data.filters || []), + ...(formData.filters || []), + ...(formData.extra_filters || []), + ]; + const subsliceCopy = { + ...subslice, + form_data: { + ...subslice.form_data, + filters, + }, + }; + + const url = getExploreLongUrl(subsliceCopy.form_data, 'json'); + + if (url) { + 
SupersetClient.get({ + endpoint: url, + }) + .then(({ json }) => { + const layer = layerGenerators[subsliceCopy.form_data.viz_type]( + subsliceCopy.form_data, + json, + props.onAddFilter, + setTooltip, + props.datasource, + [], + props.onSelect, + ); + setSubSlicesLayers(subSlicesLayers => ({ + ...subSlicesLayers, + [subsliceCopy.slice_id]: layer, + })); + }) + .catch(() => {}); + } + }, + ); + }, + [props.datasource, props.onAddFilter, props.onSelect, setTooltip], + ); + + const prevDeckSlices = usePrevious(props.formData.deck_slices); + useEffect(() => { + const { formData, payload } = props; + const hasChanges = !isEqual(prevDeckSlices, formData.deck_slices); + if (hasChanges) { + loadLayers(formData, payload); + } + }, [loadLayers, prevDeckSlices, props]); + + const { payload, formData, setControlValue, height, width } = props; + const layers = Object.values(subSlicesLayers); + + return ( + + ); +}; -export default DeckMulti; +export default memo(DeckMulti); diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/TooltipRow.tsx b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/TooltipRow.tsx index 9d72f719fe645..3e69258556ce1 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/TooltipRow.tsx +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/TooltipRow.tsx @@ -23,15 +23,11 @@ type TooltipRowProps = { value: string; }; -export default class TooltipRow extends React.PureComponent { - render() { - const { label, value } = this.props; +const TooltipRow = ({ label, value }: TooltipRowProps) => ( +
+ {label} + {value} +
+); - return ( -
- {label} - {value} -
- ); - } -} +export default TooltipRow; diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/factory.tsx b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/factory.tsx index 4ddde91247de8..fb1255a2fdc32 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/factory.tsx +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/factory.tsx @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -import React from 'react'; +import React, { memo, useCallback, useEffect, useRef, useState } from 'react'; import { isEqual } from 'lodash'; import { Layer } from 'deck.gl/typed'; import { @@ -24,11 +24,12 @@ import { QueryFormData, JsonObject, HandlerFunction, + usePrevious, } from '@superset-ui/core'; import { DeckGLContainerStyledWrapper, - DeckGLContainer, + DeckGLContainerHandle, } from './DeckGLContainer'; import CategoricalDeckGLContainer from './CategoricalDeckGLContainer'; import fitViewport, { Viewport } from './utils/fitViewport'; @@ -57,91 +58,73 @@ export interface getLayerType { interface getPointsType { (data: JsonObject[]): Point[]; } -type deckGLComponentState = { - viewport: Viewport; - layer: Layer; -}; export function createDeckGLComponent( getLayer: getLayerType, getPoints: getPointsType, -): React.ComponentClass { +) { // Higher order component - class Component extends React.PureComponent< - deckGLComponentProps, - deckGLComponentState - > { - containerRef: React.RefObject = React.createRef(); - - constructor(props: deckGLComponentProps) { - super(props); - + return memo((props: deckGLComponentProps) => { + const containerRef = useRef(); + const prevFormData = usePrevious(props.formData); + const prevPayload = usePrevious(props.payload); + const getAdjustedViewport = () => { const { width, height, formData } = props; - let { viewport } = props; if (formData.autozoom) { - viewport = fitViewport(viewport, { + return fitViewport(props.viewport, { width, height, points: 
getPoints(props.payload.data.features), }) as Viewport; } + return props.viewport; + }; - this.state = { - viewport, - layer: this.computeLayer(props), - }; - this.onViewportChange = this.onViewportChange.bind(this); - } + const [viewport, setViewport] = useState(getAdjustedViewport()); - UNSAFE_componentWillReceiveProps(nextProps: deckGLComponentProps) { - // Only recompute the layer if anything BUT the viewport has changed - const nextFdNoVP = { ...nextProps.formData, viewport: null }; - const currFdNoVP = { ...this.props.formData, viewport: null }; - if ( - !isEqual(nextFdNoVP, currFdNoVP) || - nextProps.payload !== this.props.payload - ) { - this.setState({ layer: this.computeLayer(nextProps) }); + const setTooltip = useCallback((tooltip: TooltipProps['tooltip']) => { + const { current } = containerRef; + if (current) { + current?.setTooltip(tooltip); } - } + }, []); - onViewportChange(viewport: Viewport) { - this.setState({ viewport }); - } + const computeLayer = useCallback( + (props: deckGLComponentProps) => { + const { formData, payload, onAddFilter } = props; - computeLayer(props: deckGLComponentProps) { - const { formData, payload, onAddFilter } = props; + return getLayer(formData, payload, onAddFilter, setTooltip) as Layer; + }, + [setTooltip], + ); - return getLayer(formData, payload, onAddFilter, this.setTooltip) as Layer; - } + const [layer, setLayer] = useState(computeLayer(props)); - setTooltip = (tooltip: TooltipProps['tooltip']) => { - const { current } = this.containerRef; - if (current) { - current?.setTooltip(tooltip); + useEffect(() => { + // Only recompute the layer if anything BUT the viewport has changed + const prevFdNoVP = { ...prevFormData, viewport: null }; + const currFdNoVP = { ...props.formData, viewport: null }; + if (!isEqual(prevFdNoVP, currFdNoVP) || prevPayload !== props.payload) { + setLayer(computeLayer(props)); } - }; + }, [computeLayer, prevFormData, prevPayload, props]); - render() { - const { formData, payload, 
setControlValue, height, width } = this.props; - const { layer, viewport } = this.state; + const { formData, payload, setControlValue, height, width } = props; - return ( - - ); - } - } - return Component; + return ( + + ); + }); } export function createCategoricalDeckGLComponent( diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Geojson/Geojson.tsx b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Geojson/Geojson.tsx index 4aa827e45bea5..c8c9d4863ce9b 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Geojson/Geojson.tsx +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Geojson/Geojson.tsx @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -import React from 'react'; +import React, { memo, useCallback, useMemo, useRef } from 'react'; import { GeoJsonLayer } from 'deck.gl/typed'; import geojsonExtent from '@mapbox/geojson-extent'; import { @@ -27,7 +27,7 @@ import { } from '@superset-ui/core'; import { - DeckGLContainer, + DeckGLContainerHandle, DeckGLContainerStyledWrapper, } from '../../DeckGLContainer'; import { hexToRGB } from '../../utils/colors'; @@ -164,21 +164,19 @@ export type DeckGLGeoJsonProps = { width: number; }; -class DeckGLGeoJson extends React.Component { - containerRef = React.createRef(); - - setTooltip = (tooltip: TooltipProps['tooltip']) => { - const { current } = this.containerRef; +const DeckGLGeoJson = (props: DeckGLGeoJsonProps) => { + const containerRef = useRef(); + const setTooltip = useCallback((tooltip: TooltipProps['tooltip']) => { + const { current } = containerRef; if (current) { current.setTooltip(tooltip); } - }; + }, []); - render() { - const { formData, payload, setControlValue, onAddFilter, height, width } = - this.props; + const { formData, payload, setControlValue, onAddFilter, height, width } = + props; - let { viewport } = this.props; + const viewport: Viewport = useMemo(() => { if 
(formData.autozoom) { const points = payload?.data?.features?.reduce?.( @@ -194,29 +192,36 @@ class DeckGLGeoJson extends React.Component { ) || []; if (points.length) { - viewport = fitViewport(viewport, { + return fitViewport(props.viewport, { width, height, points, }); } } + return props.viewport; + }, [ + formData.autozoom, + height, + payload?.data?.features, + props.viewport, + width, + ]); - const layer = getLayer(formData, payload, onAddFilter, this.setTooltip); - - return ( - - ); - } -} + const layer = getLayer(formData, payload, onAddFilter, setTooltip); + + return ( + + ); +}; -export default DeckGLGeoJson; +export default memo(DeckGLGeoJson); diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Polygon/Polygon.tsx b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Polygon/Polygon.tsx index 627125c398199..460c4a3b51969 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Polygon/Polygon.tsx +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Polygon/Polygon.tsx @@ -21,7 +21,7 @@ */ /* eslint no-underscore-dangle: ["error", { "allow": ["", "__timestamp"] }] */ -import React from 'react'; +import React, { memo, useCallback, useEffect, useRef, useState } from 'react'; import { HandlerFunction, JsonObject, @@ -41,7 +41,7 @@ import sandboxedEval from '../../utils/sandbox'; import getPointsFromPolygon from '../../utils/getPointsFromPolygon'; import fitViewport, { Viewport } from '../../utils/fitViewport'; import { - DeckGLContainer, + DeckGLContainerHandle, DeckGLContainerStyledWrapper, } from '../../DeckGLContainer'; import { TooltipProps } from '../../components/Tooltip'; @@ -173,145 +173,134 @@ export type DeckGLPolygonProps = { height: number; }; -export type DeckGLPolygonState = { - lastClick: number; - viewport: Viewport; - formData: PolygonFormData; - selected: JsonObject[]; -}; - -class DeckGLPolygon extends React.PureComponent< - DeckGLPolygonProps, - DeckGLPolygonState 
-> { - containerRef = React.createRef(); - - constructor(props: DeckGLPolygonProps) { - super(props); - - this.state = DeckGLPolygon.getDerivedStateFromProps( - props, - ) as DeckGLPolygonState; - - this.getLayers = this.getLayers.bind(this); - this.onSelect = this.onSelect.bind(this); - } - - static getDerivedStateFromProps( - props: DeckGLPolygonProps, - state?: DeckGLPolygonState, - ) { - const { width, height, formData, payload } = props; - - // the state is computed only from the payload; if it hasn't changed, do - // not recompute state since this would reset selections and/or the play - // slider position due to changes in form controls - if (state && payload.form_data === state.formData) { - return null; - } - - const features = payload.data.features || []; +const DeckGLPolygon = (props: DeckGLPolygonProps) => { + const containerRef = useRef(); - let { viewport } = props; - if (formData.autozoom) { + const getAdjustedViewport = useCallback(() => { + let viewport = { ...props.viewport }; + if (props.formData.autozoom) { + const features = props.payload.data.features || []; viewport = fitViewport(viewport, { - width, - height, + width: props.width, + height: props.height, points: features.flatMap(getPointsFromPolygon), }); } + if (viewport.zoom < 0) { + viewport.zoom = 0; + } + return viewport; + }, [props]); + + const [lastClick, setLastClick] = useState(0); + const [viewport, setViewport] = useState(getAdjustedViewport()); + const [stateFormData, setStateFormData] = useState(props.payload.form_data); + const [selected, setSelected] = useState([]); + + useEffect(() => { + const { payload } = props; + + if (payload.form_data !== stateFormData) { + setViewport(getAdjustedViewport()); + setSelected([]); + setLastClick(0); + setStateFormData(payload.form_data); + } + }, [getAdjustedViewport, props, stateFormData, viewport]); - return { - viewport, - selected: [], - lastClick: 0, - formData: payload.form_data, - }; - } - - onSelect(polygon: JsonObject) { - const 
{ formData, onAddFilter } = this.props; - - const now = new Date().getDate(); - const doubleClick = now - this.state.lastClick <= DOUBLE_CLICK_THRESHOLD; - - // toggle selected polygons - const selected = [...this.state.selected]; - if (doubleClick) { - selected.splice(0, selected.length, polygon); - } else if (formData.toggle_polygons) { - const i = selected.indexOf(polygon); - if (i === -1) { - selected.push(polygon); + const setTooltip = useCallback((tooltip: TooltipProps['tooltip']) => { + const { current } = containerRef; + if (current) { + current.setTooltip(tooltip); + } + }, []); + + const onSelect = useCallback( + (polygon: JsonObject) => { + const { formData, onAddFilter } = props; + + const now = new Date().getDate(); + const doubleClick = now - lastClick <= DOUBLE_CLICK_THRESHOLD; + + // toggle selected polygons + const selectedCopy = [...selected]; + if (doubleClick) { + selectedCopy.splice(0, selectedCopy.length, polygon); + } else if (formData.toggle_polygons) { + const i = selectedCopy.indexOf(polygon); + if (i === -1) { + selectedCopy.push(polygon); + } else { + selectedCopy.splice(i, 1); + } } else { - selected.splice(i, 1); + selectedCopy.splice(0, 1, polygon); } - } else { - selected.splice(0, 1, polygon); - } - this.setState({ selected, lastClick: now }); - if (formData.table_filter) { - onAddFilter(formData.line_column, selected, false, true); - } - } + setSelected(selectedCopy); + setLastClick(now); + if (formData.table_filter) { + onAddFilter(formData.line_column, selected, false, true); + } + }, + [lastClick, props, selected], + ); - getLayers() { - if (this.props.payload.data.features === undefined) { + const getLayers = useCallback(() => { + if (props.payload.data.features === undefined) { return []; } const layer = getLayer( - this.props.formData, - this.props.payload, - this.props.onAddFilter, - this.setTooltip, - this.state.selected, - this.onSelect, + props.formData, + props.payload, + props.onAddFilter, + setTooltip, + selected, + 
onSelect, ); return [layer]; - } - - setTooltip = (tooltip: TooltipProps['tooltip']) => { - const { current } = this.containerRef; - if (current) { - current.setTooltip(tooltip); - } - }; - - render() { - const { payload, formData, setControlValue } = this.props; - - const fd = formData; - const metricLabel = fd.metric ? fd.metric.label || fd.metric : null; - const accessor = (d: JsonObject) => d[metricLabel]; - - const buckets = getBuckets(formData, payload.data.features, accessor); + }, [ + onSelect, + props.formData, + props.onAddFilter, + props.payload, + selected, + setTooltip, + ]); + + const { payload, formData, setControlValue } = props; + + const metricLabel = formData.metric + ? formData.metric.label || formData.metric + : null; + const accessor = (d: JsonObject) => d[metricLabel]; - return ( -
- + + + {formData.metric !== null && ( + + )} +
+ ); +}; - {formData.metric !== null && ( - - )} - - ); - } -} - -export default DeckGLPolygon; +export default memo(DeckGLPolygon); diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Screengrid/Screengrid.tsx b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Screengrid/Screengrid.tsx index 173770c6c1aac..7e47cc9530c04 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Screengrid/Screengrid.tsx +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Screengrid/Screengrid.tsx @@ -20,7 +20,7 @@ */ /* eslint no-underscore-dangle: ["error", { "allow": ["", "__timestamp"] }] */ -import React from 'react'; +import React, { memo, useCallback, useEffect, useRef, useState } from 'react'; import { ScreenGridLayer } from 'deck.gl/typed'; import { JsonObject, JsonValue, QueryFormData, t } from '@superset-ui/core'; import { noop } from 'lodash'; @@ -30,7 +30,7 @@ import TooltipRow from '../../TooltipRow'; // eslint-disable-next-line import/extensions import fitViewport, { Viewport } from '../../utils/fitViewport'; import { - DeckGLContainer, + DeckGLContainerHandle, DeckGLContainerStyledWrapper, } from '../../DeckGLContainer'; import { TooltipProps } from '../../components/Tooltip'; @@ -99,93 +99,63 @@ export type DeckGLScreenGridProps = { onAddFilter: () => void; }; -export type DeckGLScreenGridState = { - viewport: Viewport; - formData: QueryFormData; -}; - -class DeckGLScreenGrid extends React.PureComponent< - DeckGLScreenGridProps, - DeckGLScreenGridState -> { - containerRef = React.createRef(); - - constructor(props: DeckGLScreenGridProps) { - super(props); - - this.state = DeckGLScreenGrid.getDerivedStateFromProps( - props, - ) as DeckGLScreenGridState; - - this.getLayers = this.getLayers.bind(this); - } - - static getDerivedStateFromProps( - props: DeckGLScreenGridProps, - state?: DeckGLScreenGridState, - ) { - // the state is computed only from the payload; if it hasn't changed, do - // 
not recompute state since this would reset selections and/or the play - // slider position due to changes in form controls - if (state && props.payload.form_data === state.formData) { - return null; - } +const DeckGLScreenGrid = (props: DeckGLScreenGridProps) => { + const containerRef = useRef(); + const getAdjustedViewport = useCallback(() => { const features = props.payload.data.features || []; const { width, height, formData } = props; - let { viewport } = props; if (formData.autozoom) { - viewport = fitViewport(viewport, { + return fitViewport(props.viewport, { width, height, points: getPoints(features), }); } + return props.viewport; + }, [props]); - return { - viewport, - formData: props.payload.form_data as QueryFormData, - }; - } + const [stateFormData, setStateFormData] = useState(props.payload.form_data); + const [viewport, setViewport] = useState(getAdjustedViewport()); - getLayers() { - const layer = getLayer( - this.props.formData, - this.props.payload, - noop, - this.setTooltip, - ); - - return [layer]; - } + useEffect(() => { + if (props.payload.form_data !== stateFormData) { + setViewport(getAdjustedViewport()); + setStateFormData(props.payload.form_data); + } + }, [getAdjustedViewport, props.payload.form_data, stateFormData]); - setTooltip = (tooltip: TooltipProps['tooltip']) => { - const { current } = this.containerRef; + const setTooltip = useCallback((tooltip: TooltipProps['tooltip']) => { + const { current } = containerRef; if (current) { current.setTooltip(tooltip); } - }; - - render() { - const { formData, payload, setControlValue } = this.props; - - return ( -
- -
- ); - } -} + }, []); + + const getLayers = useCallback(() => { + const layer = getLayer(props.formData, props.payload, noop, setTooltip); + + return [layer]; + }, [props.formData, props.payload, setTooltip]); + + const { formData, payload, setControlValue } = props; + + return ( +
+ +
+ ); +}; -export default DeckGLScreenGrid; +export default memo(DeckGLScreenGrid); diff --git a/superset-frontend/src/SqlLab/components/HighlightedSql/index.tsx b/superset-frontend/src/SqlLab/components/HighlightedSql/index.tsx index c646144fa7b52..67b4df665db2a 100644 --- a/superset-frontend/src/SqlLab/components/HighlightedSql/index.tsx +++ b/superset-frontend/src/SqlLab/components/HighlightedSql/index.tsx @@ -25,7 +25,7 @@ import ModalTrigger from 'src/components/ModalTrigger'; SyntaxHighlighter.registerLanguage('sql', sql); -interface HighlightedSqlProps { +export interface HighlightedSqlProps { sql: string; rawSql?: string; maxWidth?: number; diff --git a/superset-frontend/src/SqlLab/components/ResultSet/index.tsx b/superset-frontend/src/SqlLab/components/ResultSet/index.tsx index 845c784a34a1e..f1c48531f0e82 100644 --- a/superset-frontend/src/SqlLab/components/ResultSet/index.tsx +++ b/superset-frontend/src/SqlLab/components/ResultSet/index.tsx @@ -27,8 +27,11 @@ import { QueryState, styled, t, + tn, useTheme, usePrevious, + css, + getNumberFormatter, } from '@superset-ui/core'; import ErrorMessageWithStackTrace from 'src/components/ErrorMessage/ErrorMessageWithStackTrace'; import { @@ -42,6 +45,9 @@ import { mountExploreUrl } from 'src/explore/exploreUtils'; import { postFormData } from 'src/explore/exploreUtils/formData'; import ProgressBar from 'src/components/ProgressBar'; import Loading from 'src/components/Loading'; +import Card from 'src/components/Card'; +import Label from 'src/components/Label'; +import { Tooltip } from 'src/components/Tooltip'; import FilterableTable from 'src/components/FilterableTable'; import CopyToClipboard from 'src/components/CopyToClipboard'; import { addDangerToast } from 'src/components/MessageToasts/actions'; @@ -55,6 +61,7 @@ import { reRunQuery, } from 'src/SqlLab/actions/sqlLab'; import { URL_PARAMS } from 'src/constants'; +import Icons from 'src/components/Icons'; import ExploreCtasResultsButton from 
'../ExploreCtasResultsButton'; import ExploreResultsButton from '../ExploreResultsButton'; import HighlightedSql from '../HighlightedSql'; @@ -76,6 +83,7 @@ export interface ResultSetProps { query: QueryResponse; search?: boolean; showSql?: boolean; + showSqlInline?: boolean; visualize?: boolean; user: UserWithPermissionsAndRoles; defaultQueryLimit: number; @@ -110,7 +118,7 @@ const MonospaceDiv = styled.div` const ReturnedRows = styled.div` font-size: ${({ theme }) => theme.typography.sizes.s}px; - line-height: ${({ theme }) => theme.gridUnit * 6}px; + line-height: 1; `; const ResultSetControls = styled.div` @@ -124,10 +132,8 @@ const ResultSetButtons = styled.div` padding-right: ${({ theme }) => 2 * theme.gridUnit}px; `; -const LimitMessage = styled.span` - color: ${({ theme }) => theme.colors.secondary.light1}; - margin-left: ${({ theme }) => theme.gridUnit * 2}px; -`; +const ROWS_CHIP_WIDTH = 100; +const GAP = 8; const ResultSet = ({ cache = false, @@ -138,6 +144,7 @@ const ResultSet = ({ query, search = true, showSql = false, + showSqlInline = false, visualize = true, user, defaultQueryLimit, @@ -296,7 +303,7 @@ const ResultSet = ({ const renderRowsReturned = () => { const { results, rows, queryLimit, limitingFactor } = query; - let limitMessage; + let limitMessage = ''; const limitReached = results?.displayLimitReached; const limit = queryLimit || results.query.limit; const isAdmin = !!user?.roles?.Admin; @@ -339,7 +346,7 @@ const ResultSet = ({ { rows }, ); } - + const formattedRowCount = getNumberFormatter()(rows); const rowsReturnedMessage = t('%(rows)d rows returned', { rows, }); @@ -349,10 +356,27 @@ const ResultSet = ({ return ( {!limitReached && !shouldUseDefaultDropdownAlert && ( - - {rowsReturnedMessage} - {limitMessage} - + + + )} {!limitReached && shouldUseDefaultDropdownAlert && (
@@ -413,7 +437,12 @@ const ResultSet = ({ } if (showSql) { - sql = ; + sql = ( + + ); } if (query.state === QueryState.STOPPED) { @@ -501,8 +530,39 @@ const ResultSet = ({ return ( {renderControls()} - {renderRowsReturned()} - {sql} + {showSql && showSqlInline ? ( +
+ + {sql} + + {renderRowsReturned()} +
+ ) : ( + <> + {renderRowsReturned()} + {sql} + + )} col.column_name)} diff --git a/superset-frontend/src/SqlLab/components/SouthPane/index.tsx b/superset-frontend/src/SqlLab/components/SouthPane/index.tsx index d601018a6e1bd..38a20f9f6df07 100644 --- a/superset-frontend/src/SqlLab/components/SouthPane/index.tsx +++ b/superset-frontend/src/SqlLab/components/SouthPane/index.tsx @@ -189,6 +189,8 @@ const SouthPane = ({ database={databases[latestQuery.dbId]} displayLimit={displayLimit} defaultQueryLimit={defaultQueryLimit} + showSql + showSqlInline /> ); } diff --git a/superset-frontend/src/components/Select/Select.tsx b/superset-frontend/src/components/Select/Select.tsx index 770ef1df4a44f..89c62ef8bd98f 100644 --- a/superset-frontend/src/components/Select/Select.tsx +++ b/superset-frontend/src/components/Select/Select.tsx @@ -208,13 +208,13 @@ const Select = forwardRef( () => !isSingleMode && allowSelectAll && - fullSelectOptions.length > 0 && + selectOptions.length > 0 && enabledOptions.length > 1 && !inputValue, [ isSingleMode, allowSelectAll, - fullSelectOptions.length, + selectOptions.length, enabledOptions.length, inputValue, ], diff --git a/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx b/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx index 45d8d3efabb41..2ccbbeb25e5f3 100644 --- a/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx +++ b/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx @@ -114,12 +114,12 @@ export const useExploreAdditionalActionsMenu = ( onOpenPropertiesModal, ownState, dashboards, + ...rest ) => { const theme = useTheme(); const { addDangerToast, addSuccessToast } = useToasts(); const [showReportSubMenu, setShowReportSubMenu] = useState(null); const [isDropdownVisible, setIsDropdownVisible] = useState(false); - const [openSubmenus, setOpenSubmenus] = useState([]); const chart = useSelector( state => 
state.charts?.[getChartKey(state.explore)], ); @@ -204,23 +204,19 @@ export const useExploreAdditionalActionsMenu = ( case MENU_KEYS.EXPORT_TO_CSV: exportCSV(); setIsDropdownVisible(false); - setOpenSubmenus([]); break; case MENU_KEYS.EXPORT_TO_CSV_PIVOTED: exportCSVPivoted(); setIsDropdownVisible(false); - setOpenSubmenus([]); break; case MENU_KEYS.EXPORT_TO_JSON: exportJson(); setIsDropdownVisible(false); - setOpenSubmenus([]); break; case MENU_KEYS.EXPORT_TO_XLSX: exportExcel(); setIsDropdownVisible(false); - setOpenSubmenus([]); break; case MENU_KEYS.DOWNLOAD_AS_IMAGE: downloadAsImage( @@ -230,21 +226,17 @@ export const useExploreAdditionalActionsMenu = ( true, )(domEvent); setIsDropdownVisible(false); - setOpenSubmenus([]); break; case MENU_KEYS.COPY_PERMALINK: copyLink(); setIsDropdownVisible(false); - setOpenSubmenus([]); break; case MENU_KEYS.EMBED_CODE: setIsDropdownVisible(false); - setOpenSubmenus([]); break; case MENU_KEYS.SHARE_BY_EMAIL: shareByEmail(); setIsDropdownVisible(false); - setOpenSubmenus([]); break; case MENU_KEYS.VIEW_QUERY: setIsDropdownVisible(false); @@ -272,12 +264,7 @@ export const useExploreAdditionalActionsMenu = ( const menu = useMemo( () => ( - + <> {slice && ( @@ -423,7 +410,6 @@ export const useExploreAdditionalActionsMenu = ( handleMenuClick, isDropdownVisible, latestQueryFormData, - openSubmenus, showReportSubMenu, slice, theme.gridUnit, diff --git a/superset-frontend/src/features/reports/ReportModal/HeaderReportDropdown/index.tsx b/superset-frontend/src/features/reports/ReportModal/HeaderReportDropdown/index.tsx index cd428b0abb9ab..b38d44a710b78 100644 --- a/superset-frontend/src/features/reports/ReportModal/HeaderReportDropdown/index.tsx +++ b/superset-frontend/src/features/reports/ReportModal/HeaderReportDropdown/index.tsx @@ -114,6 +114,7 @@ export default function HeaderReportDropDown({ setShowReportSubMenu, setIsDropdownVisible, isDropdownVisible, + ...rest }: HeaderReportProps) { const dispatch = useDispatch(); const 
report = useSelector(state => { @@ -214,7 +215,7 @@ const textMenu = () => isEmpty(report) ? ( - + {DropdownItemExtension ? ( diff --git a/superset/db_engine_specs/README.md b/superset/db_engine_specs/README.md index 17441607681a3..7f84a81029915 100644 --- a/superset/db_engine_specs/README.md +++ b/superset/db_engine_specs/README.md @@ -159,7 +159,7 @@ GROUP BY ### `time_secondary_columns = False` -Datasets can have a main datatime column (`main_dttm_col`), but can also have secondary time columns. When this attribute is true, wheneve the secondary columns are filtered, the same filter is applied to the main datetime column. +Datasets can have a main datetime column (`main_dttm_col`), but can also have secondary time columns. When this attribute is true, whenever the secondary columns are filtered, the same filter is applied to the main datetime column. This might be useful if you have a table partitioned on a daily `ds` column in Hive (which doesn't support indexes), and a secondary column with the timestamp of the events, ie: diff --git a/superset/db_engine_specs/base.py b/superset/db_engine_specs/base.py index ebeea81ec6faa..4ca71340a95f2 100644 --- a/superset/db_engine_specs/base.py +++ b/superset/db_engine_specs/base.py @@ -1040,6 +1040,24 @@ def handle_cursor(cls, cursor: Any, query: Query, session: Session) -> None: query object""" # TODO: Fix circular import error caused by importing sql_lab.Query + @classmethod + def execute_with_cursor( + cls, cursor: Any, sql: str, query: Query, session: Session + ) -> None: + """ + Trigger execution of a query and handle the resulting cursor. + + For most implementations this just makes calls to `execute` and + `handle_cursor` consecutively, but in some engines (e.g.
Trino) we may + need to handle client limitations such as lack of async support and + perform a more complicated operation to get information from the cursor + in a timely manner and facilitate operations such as query stop + """ + logger.debug("Query %d: Running query: %s", query.id, sql) + cls.execute(cursor, sql, async_=True) + logger.debug("Query %d: Handling cursor", query.id) + cls.handle_cursor(cursor, query, session) + @classmethod def extract_error_message(cls, ex: Exception) -> str: return f"{cls.engine} error: {cls._extract_error_message(ex)}" diff --git a/superset/db_engine_specs/presto.py b/superset/db_engine_specs/presto.py index c05865b871a73..dfb82877a678d 100644 --- a/superset/db_engine_specs/presto.py +++ b/superset/db_engine_specs/presto.py @@ -527,12 +527,13 @@ def _latest_partition_from_df(cls, df: pd.DataFrame) -> list[str] | None: @classmethod @cache_manager.data_cache.memoize(timeout=60) - def latest_partition( + def latest_partition( # pylint: disable=too-many-arguments cls, table_name: str, schema: str | None, database: Database, show_first: bool = False, + indexes: list[dict[str, Any]] | None = None, ) -> tuple[list[str], list[str] | None]: """Returns col name and the latest (max) partition value for a table @@ -542,12 +543,15 @@ def latest_partition( :type database: models.Database :param show_first: displays the value for the first partitioning key if there are many partitioning keys + :param indexes: indexes from the database :type show_first: bool >>> latest_partition('foo_table') (['ds'], ('2018-01-01',)) """ - indexes = database.get_indexes(table_name, schema) + if indexes is None: + indexes = database.get_indexes(table_name, schema) + if not indexes: raise SupersetTemplateException( f"Error getting partition for {schema}.{table_name}. 
" @@ -1221,7 +1225,7 @@ def extra_table_metadata( if indexes := database.get_indexes(table_name, schema_name): col_names, latest_parts = cls.latest_partition( - table_name, schema_name, database, show_first=True + table_name, schema_name, database, show_first=True, indexes=indexes ) if not latest_parts: diff --git a/superset/db_engine_specs/trino.py b/superset/db_engine_specs/trino.py index da0a56e10033a..425137e302e6b 100644 --- a/superset/db_engine_specs/trino.py +++ b/superset/db_engine_specs/trino.py @@ -18,6 +18,8 @@ import contextlib import logging +import threading +import time from typing import Any, TYPE_CHECKING import simplejson as json @@ -58,7 +60,11 @@ def extra_table_metadata( if indexes := database.get_indexes(table_name, schema_name): col_names, latest_parts = cls.latest_partition( - table_name, schema_name, database, show_first=True + table_name, + schema_name, + database, + show_first=True, + indexes=indexes, ) if not latest_parts: @@ -147,14 +153,21 @@ def get_tracking_url(cls, cursor: Cursor) -> str | None: @classmethod def handle_cursor(cls, cursor: Cursor, query: Query, session: Session) -> None: - if tracking_url := cls.get_tracking_url(cursor): - query.tracking_url = tracking_url + """ + Handle a trino client cursor. + + WARNING: if you execute a query, it will block until complete and you + will not be able to handle the cursor until complete. Use + `execute_with_cursor` instead, to handle this asynchronously. 
+ """ # Adds the executed query id to the extra payload so the query can be cancelled - query.set_extra_json_key( - key=QUERY_CANCEL_KEY, - value=(cancel_query_id := cursor.stats["queryId"]), - ) + cancel_query_id = cursor.query_id + logger.debug("Query %d: queryId %s found in cursor", query.id, cancel_query_id) + query.set_extra_json_key(key=QUERY_CANCEL_KEY, value=cancel_query_id) + + if tracking_url := cls.get_tracking_url(cursor): + query.tracking_url = tracking_url session.commit() @@ -169,6 +182,51 @@ def handle_cursor(cls, cursor: Cursor, query: Query, session: Session) -> None: super().handle_cursor(cursor=cursor, query=query, session=session) + @classmethod + def execute_with_cursor( + cls, cursor: Any, sql: str, query: Query, session: Session + ) -> None: + """ + Trigger execution of a query and handle the resulting cursor. + + Trino's client blocks until the query is complete, so we need to run it + in another thread and invoke `handle_cursor` to poll for the query ID + to appear on the cursor in parallel. + """ + execute_result: dict[str, Any] = {} + + def _execute(results: dict[str, Any]) -> None: + logger.debug("Query %d: Running query: %s", query.id, sql) + + # Pass result / exception information back to the parent thread + try: + cls.execute(cursor, sql) + results["complete"] = True + except Exception as ex: # pylint: disable=broad-except + results["complete"] = True + results["error"] = ex + + execute_thread = threading.Thread(target=_execute, args=(execute_result,)) + execute_thread.start() + + # Wait for a query ID to be available before handling the cursor, as + # it's required by that method; it may never become available on error. 
+ while not cursor.query_id and not execute_result.get("complete"): + time.sleep(0.1) + + logger.debug("Query %d: Handling cursor", query.id) + cls.handle_cursor(cursor, query, session) + + # Block until the query completes; same behaviour as the client itself + logger.debug("Query %d: Waiting for query to complete", query.id) + while not execute_result.get("complete"): + time.sleep(0.5) + + # Unfortunately we'll mangle the stack trace due to the thread, but + # throwing the original exception allows mapping database errors as normal + if err := execute_result.get("error"): + raise err + @classmethod def prepare_cancel_query(cls, query: Query, session: Session) -> None: if QUERY_CANCEL_KEY not in query.extra: diff --git a/superset/legacy.py b/superset/legacy.py index 7c25d2de984df..52b63a013b7c0 100644 --- a/superset/legacy.py +++ b/superset/legacy.py @@ -17,10 +17,27 @@ """Code related with dealing with legacy / change management""" from typing import Any +from superset import is_feature_enabled + def update_time_range(form_data: dict[str, Any]) -> None: - """Move since and until to time_range.""" + """ + Legacy adjustments to time range. + + - Move `since` and `until` to `time_range`. + - Define `time_range` when `granularity_sqla` is set and unfiltered. 
+ + """ if "since" in form_data or "until" in form_data: - form_data[ - "time_range" - ] = f'{form_data.pop("since", "") or ""} : {form_data.pop("until", "") or ""}' + since = form_data.pop("since", "") or "" + until = form_data.pop("until", "") or "" + form_data["time_range"] = f"{since} : {until}" + + if is_feature_enabled("GENERIC_CHART_AXES"): + if temporal_column := form_data.get("granularity_sqla"): + if any( + adhoc_filter.get("subject") == temporal_column + and adhoc_filter.get("comparator") == "No filter" + for adhoc_filter in form_data.get("adhoc_filters", []) + ): + form_data.setdefault("time_range", "No filter") diff --git a/superset/sql_lab.py b/superset/sql_lab.py index cb8da3ef0444c..4d71e23d88cee 100644 --- a/superset/sql_lab.py +++ b/superset/sql_lab.py @@ -191,7 +191,7 @@ def get_sql_results( # pylint: disable=too-many-arguments return handle_query_error(ex, query, session) -def execute_sql_statement( # pylint: disable=too-many-arguments,too-many-statements +def execute_sql_statement( # pylint: disable=too-many-arguments sql_statement: str, query: Query, session: Session, @@ -217,6 +217,7 @@ def execute_sql_statement( # pylint: disable=too-many-arguments,too-many-statem ) sql = parsed_query.stripped() + # This is a test to see if the query is being # limited by either the dropdown or the sql. # We are testing to see if more rows exist than the limit. 
@@ -270,10 +271,7 @@ def execute_sql_statement( # pylint: disable=too-many-arguments,too-many-statem ) session.commit() with stats_timing("sqllab.query.time_executing_query", stats_logger): - logger.debug("Query %d: Running query: %s", query.id, sql) - db_engine_spec.execute(cursor, sql, async_=True) - logger.debug("Query %d: Handling cursor", query.id) - db_engine_spec.handle_cursor(cursor, query, session) + db_engine_spec.execute_with_cursor(cursor, sql, query, session) with stats_timing("sqllab.query.time_fetching_results", stats_logger): logger.debug( @@ -512,8 +510,13 @@ def execute_sql_statements( ex, query, session, payload, prefix_message ) return payload - # Commit the connection so CTA queries will create the table. - if apply_ctas: + + # Commit the connection so CTA queries will create the table and any DML. + should_commit = ( + not db_engine_spec.is_select_query(parsed_query) # check if query is DML + or apply_ctas + ) + if should_commit: conn.commit() # Success, updating the query entry in database diff --git a/superset/sql_parse.py b/superset/sql_parse.py index 2a283b81f0fc9..34fc354730536 100644 --- a/superset/sql_parse.py +++ b/superset/sql_parse.py @@ -244,46 +244,52 @@ def is_select(self) -> bool: # make sure we strip comments; prevents a bug with comments in the CTE parsed = sqlparse.parse(self.strip_comments()) - # Check if this is a CTE - if parsed[0].is_group and parsed[0][0].ttype == Keyword.CTE: - if sqloxide_parse is not None: - try: - if not self._check_cte_is_select( - sqloxide_parse(self.strip_comments(), dialect="ansi") - ): - return False - except ValueError: - # sqloxide was not able to parse the query, so let's continue with - # sqlparse - pass - inner_cte = self.get_inner_cte_expression(parsed[0].tokens) or [] - # Check if the inner CTE is a not a SELECT - if any(token.ttype == DDL for token in inner_cte) or any( + for statement in parsed: + # Check if this is a CTE + if statement.is_group and statement[0].ttype == Keyword.CTE: + 
if sqloxide_parse is not None: + try: + if not self._check_cte_is_select( + sqloxide_parse(self.strip_comments(), dialect="ansi") + ): + return False + except ValueError: + # sqloxide was not able to parse the query, so let's continue with + # sqlparse + pass + inner_cte = self.get_inner_cte_expression(statement.tokens) or [] + # Check if the inner CTE is a not a SELECT + if any(token.ttype == DDL for token in inner_cte) or any( + token.ttype == DML and token.normalized != "SELECT" + for token in inner_cte + ): + return False + + if statement.get_type() == "SELECT": + continue + + if statement.get_type() != "UNKNOWN": + return False + + # for `UNKNOWN`, check all DDL/DML explicitly: only `SELECT` DML is allowed, + # and no DDL is allowed + if any(token.ttype == DDL for token in statement) or any( token.ttype == DML and token.normalized != "SELECT" - for token in inner_cte + for token in statement ): return False - if parsed[0].get_type() == "SELECT": - return True - - if parsed[0].get_type() != "UNKNOWN": - return False - - # for `UNKNOWN`, check all DDL/DML explicitly: only `SELECT` DML is allowed, - # and no DDL is allowed - if any(token.ttype == DDL for token in parsed[0]) or any( - token.ttype == DML and token.normalized != "SELECT" for token in parsed[0] - ): - return False + # return false on `EXPLAIN`, `SET`, `SHOW`, etc. + if statement[0].ttype == Keyword: + return False - # return false on `EXPLAIN`, `SET`, `SHOW`, etc. 
- if parsed[0][0].ttype == Keyword: - return False + if not any( + token.ttype == DML and token.normalized == "SELECT" + for token in statement + ): + return False - return any( - token.ttype == DML and token.normalized == "SELECT" for token in parsed[0] - ) + return True def get_inner_cte_expression(self, tokens: TokenList) -> Optional[TokenList]: for token in tokens: diff --git a/superset/templates/superset/form_view/csv_to_database_view/edit.html b/superset/templates/superset/form_view/csv_to_database_view/edit.html index 0ea4b7b0e5fdd..7f50c2f978642 100644 --- a/superset/templates/superset/form_view/csv_to_database_view/edit.html +++ b/superset/templates/superset/form_view/csv_to_database_view/edit.html @@ -68,10 +68,6 @@ {{ lib.render_field(form.parse_dates, begin_sep_label, end_sep_label, begin_sep_field, end_sep_field) }} - - {{ lib.render_field(form.infer_datetime_format, begin_sep_label, - end_sep_label, begin_sep_field, end_sep_field) }} - {{ lib.render_field(form.day_first, begin_sep_label, end_sep_label, begin_sep_field, end_sep_field) }} diff --git a/superset/views/database/forms.py b/superset/views/database/forms.py index 36bd43e2fe1a2..9e3ba500af465 100644 --- a/superset/views/database/forms.py +++ b/superset/views/database/forms.py @@ -197,10 +197,6 @@ class CsvToDatabaseForm(UploadToDatabaseForm): ), filters=[filter_not_empty_values], ) - infer_datetime_format = BooleanField( - _("Interpret Datetime Format Automatically"), - description=_("Interpret the datetime format automatically"), - ) day_first = BooleanField( _("Day First"), description=_("DD/MM format dates, international and European format"), diff --git a/superset/views/database/views.py b/superset/views/database/views.py index 6f9344339eaee..0a91df2d6f087 100644 --- a/superset/views/database/views.py +++ b/superset/views/database/views.py @@ -167,7 +167,6 @@ def form_get(self, form: CsvToDatabaseForm) -> None: form.overwrite_duplicate.data = True form.skip_initial_space.data = False 
form.skip_blank_lines.data = True - form.infer_datetime_format.data = True form.day_first.data = False form.decimal.data = "." form.if_exists.data = "fail" @@ -199,7 +198,6 @@ def form_post(self, form: CsvToDatabaseForm) -> Response: filepath_or_buffer=form.csv_file.data, header=form.header.data if form.header.data else 0, index_col=form.index_col.data, - infer_datetime_format=form.infer_datetime_format.data, dayfirst=form.day_first.data, iterator=True, keep_default_na=not form.null_values.data, diff --git a/tests/unit_tests/db_engine_specs/test_trino.py b/tests/unit_tests/db_engine_specs/test_trino.py index 963953d18b48e..1b50a683a0841 100644 --- a/tests/unit_tests/db_engine_specs/test_trino.py +++ b/tests/unit_tests/db_engine_specs/test_trino.py @@ -352,7 +352,7 @@ def test_handle_cursor_early_cancel( query_id = "myQueryId" cursor_mock = engine_mock.return_value.__enter__.return_value - cursor_mock.stats = {"queryId": query_id} + cursor_mock.query_id = query_id session_mock = mocker.MagicMock() query = Query() @@ -366,3 +366,32 @@ def test_handle_cursor_early_cancel( assert cancel_query_mock.call_args[1]["cancel_query_id"] == query_id else: assert cancel_query_mock.call_args is None + + +def test_execute_with_cursor_in_parallel(mocker: MockerFixture): + """Test that `execute_with_cursor` fetches query ID from the cursor""" + from superset.db_engine_specs.trino import TrinoEngineSpec + + query_id = "myQueryId" + + mock_cursor = mocker.MagicMock() + mock_cursor.query_id = None + + mock_query = mocker.MagicMock() + mock_session = mocker.MagicMock() + + def _mock_execute(*args, **kwargs): + mock_cursor.query_id = query_id + + mock_cursor.execute.side_effect = _mock_execute + + TrinoEngineSpec.execute_with_cursor( + cursor=mock_cursor, + sql="SELECT 1 FROM foo", + query=mock_query, + session=mock_session, + ) + + mock_query.set_extra_json_key.assert_called_once_with( + key=QUERY_CANCEL_KEY, value=query_id + ) diff --git a/tests/unit_tests/legacy_tests.py 
b/tests/unit_tests/legacy_tests.py new file mode 100644 index 0000000000000..9a8f7fbcd2c1a --- /dev/null +++ b/tests/unit_tests/legacy_tests.py @@ -0,0 +1,100 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# pylint: disable=invalid-name, use-implicit-booleaness-not-comparison + +import copy +from typing import Any + +from superset.legacy import update_time_range +from tests.unit_tests.conftest import with_feature_flags + +original_form_data = { + "granularity_sqla": "order_date", + "datasource": "22__table", + "viz_type": "table", + "query_mode": "raw", + "groupby": [], + "time_grain_sqla": "P1D", + "temporal_columns_lookup": {"order_date": True}, + "all_columns": ["order_date", "state", "product_code"], + "percent_metrics": [], + "adhoc_filters": [ + { + "clause": "WHERE", + "subject": "order_date", + "operator": "TEMPORAL_RANGE", + "comparator": "No filter", + "expressionType": "SIMPLE", + } + ], + "order_by_cols": [], + "row_limit": 1000, + "server_page_length": 10, + "order_desc": True, + "table_timestamp_format": "smart_date", + "show_cell_bars": True, + "color_pn": True, + "extra_form_data": {}, + "dashboards": [19], + "force": False, + "result_format": "json", + "result_type": "full", + "include_time": False, +} + + +def 
test_update_time_range_since_until() -> None: + """ + Tests for the old `since` and `until` parameters. + """ + form_data: dict[str, Any] + + form_data = {} + update_time_range(form_data) + assert form_data == {} + + form_data = {"since": "yesterday"} + update_time_range(form_data) + assert form_data == {"time_range": "yesterday : "} + + form_data = {"until": "tomorrow"} + update_time_range(form_data) + assert form_data == {"time_range": " : tomorrow"} + + form_data = {"since": "yesterday", "until": "tomorrow"} + update_time_range(form_data) + assert form_data == {"time_range": "yesterday : tomorrow"} + + +@with_feature_flags(GENERIC_CHART_AXES=False) +def test_update_time_range_granularity_sqla_no_feature_flag() -> None: + """ + Tests for the unfiltered `granularity_sqla` when `GENERIC_CHART_AXES` is off. + """ + form_data = copy.deepcopy(original_form_data) + update_time_range(form_data) + assert form_data == original_form_data + + +@with_feature_flags(GENERIC_CHART_AXES=True) +def test_update_time_range_granularity_sqla_with_feature_flag() -> None: + """ + Tests for the unfiltered `granularity_sqla` when `GENERIC_CHART_AXES` is on. 
+ """ + form_data = copy.deepcopy(original_form_data) + update_time_range(form_data) + assert form_data["time_range"] == "No filter" diff --git a/tests/unit_tests/sql_lab_test.py b/tests/unit_tests/sql_lab_test.py index 29f45eab682a0..edc1fd2ec4a5d 100644 --- a/tests/unit_tests/sql_lab_test.py +++ b/tests/unit_tests/sql_lab_test.py @@ -55,8 +55,8 @@ def test_execute_sql_statement(mocker: MockerFixture, app: None) -> None: ) database.apply_limit_to_sql.assert_called_with("SELECT 42 AS answer", 2, force=True) - db_engine_spec.execute.assert_called_with( - cursor, "SELECT 42 AS answer LIMIT 2", async_=True + db_engine_spec.execute_with_cursor.assert_called_with( + cursor, "SELECT 42 AS answer LIMIT 2", query, session ) SupersetResultSet.assert_called_with([(42,)], cursor.description, db_engine_spec) @@ -106,10 +106,8 @@ def test_execute_sql_statement_with_rls( 101, force=True, ) - db_engine_spec.execute.assert_called_with( - cursor, - "SELECT * FROM sales WHERE organization_id=42 LIMIT 101", - async_=True, + db_engine_spec.execute_with_cursor.assert_called_with( + cursor, "SELECT * FROM sales WHERE organization_id=42 LIMIT 101", query, session ) SupersetResultSet.assert_called_with([(42,)], cursor.description, db_engine_spec) diff --git a/tests/unit_tests/sql_parse_tests.py b/tests/unit_tests/sql_parse_tests.py index 7d8839198c430..73074d3df64bf 100644 --- a/tests/unit_tests/sql_parse_tests.py +++ b/tests/unit_tests/sql_parse_tests.py @@ -1616,3 +1616,10 @@ def test_extract_table_references(mocker: MockerFixture) -> None: Table(table="other_table", schema=None, catalog=None) } logger.warning.assert_not_called() + + +def test_is_select() -> None: + """ + Test `is_select`. + """ + assert not ParsedQuery("SELECT 1; DROP DATABASE superset").is_select()