diff --git a/CHANGELOG.md b/CHANGELOG.md index 3e150d476b76..d80537b69c45 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -19,6 +19,7 @@ under the License. ## Change Log +- [3.0.2](#302-mon-nov-20-073838-2023--0500) - [3.0.1](#301-tue-oct-13-103221-2023--0700) - [3.0.0](#300-thu-aug-24-133627-2023--0600) - [2.1.1](#211-sun-apr-23-154421-2023-0100) @@ -32,6 +33,59 @@ under the License. - [1.4.2](#142-sat-mar-19-000806-2022-0200) - [1.4.1](#141) +### 3.0.2 (Mon Nov 20 07:38:38 2023 -0500) + +**Fixes** + +- [#26037](https://github.com/apache/superset/pull/26037) fix: update FAB to 4.3.10, Azure user info fix (@dpgaspar) +- [#25901](https://github.com/apache/superset/pull/25901) fix(native filters): rendering performance improvement by reduce overrendering (@justinpark) +- [#25985](https://github.com/apache/superset/pull/25985) fix(explore): redandant force param (@justinpark) +- [#25993](https://github.com/apache/superset/pull/25993) fix: Make Select component fire onChange listener when a selection is pasted in (@jfrag1) +- [#25997](https://github.com/apache/superset/pull/25997) fix(rls): Update text from tables to datasets in RLS modal (@yousoph) +- [#25703](https://github.com/apache/superset/pull/25703) fix(helm): Restart all related deployments when bootstrap script changed (@josedev-union) +- [#25973](https://github.com/apache/superset/pull/25973) fix: naming denomalized to denormalized in helpers.py (@hughhhh) +- [#25919](https://github.com/apache/superset/pull/25919) fix: always denorm column value before querying values (@hughhhh) +- [#25947](https://github.com/apache/superset/pull/25947) fix: update flask-caching to avoid breaking redis cache, solves #25339 (@ggbaro) +- [#25903](https://github.com/apache/superset/pull/25903) fix(sqllab): invalid sanitization on comparison symbol (@justinpark) +- [#25857](https://github.com/apache/superset/pull/25857) fix(table): Double percenting ad-hoc percentage metrics (@john-bodley) +- [#25872](https://github.com/apache/superset/pull/25872) fix(trino): allow impersonate_user flag to be imported (@FGrobelny) +- [#25897](https://github.com/apache/superset/pull/25897) fix: trino cursor (@betodealmeida) +- [#25898](https://github.com/apache/superset/pull/25898) fix: database version field (@betodealmeida) +- [#25877](https://github.com/apache/superset/pull/25877) fix: Saving Mixed Chart with dashboard filter applied breaks adhoc_filter_b (@kgabryje) +- [#25842](https://github.com/apache/superset/pull/25842) fix(charts): Time grain is None when dataset uses Jinja (@Antonio-RiveroMartnez) +- [#25843](https://github.com/apache/superset/pull/25843) fix: remove `update_charts_owners` (@betodealmeida) +- [#25707](https://github.com/apache/superset/pull/25707) fix(table chart): Show Cell Bars correctly #25625 (@SA-Ark) +- [#25429](https://github.com/apache/superset/pull/25429) fix: the temporal x-axis results in a none time_range. 
(@mapledan) +- [#25853](https://github.com/apache/superset/pull/25853) fix: Fires onChange when clearing all values of single select (@michael-s-molina) +- [#25814](https://github.com/apache/superset/pull/25814) fix(sqllab): infinite fetching status after results are landed (@justinpark) +- [#25768](https://github.com/apache/superset/pull/25768) fix(SQL field in edit dataset modal): display full sql query (@rtexelm) +- [#25804](https://github.com/apache/superset/pull/25804) fix: Resolve issue #24195 (@john-bodley) +- [#25801](https://github.com/apache/superset/pull/25801) fix: Revert "fix: Apply normalization to all dttm columns (#25147)" (@john-bodley) +- [#25779](https://github.com/apache/superset/pull/25779) fix: DB-specific quoting in Jinja macro (@betodealmeida) +- [#25640](https://github.com/apache/superset/pull/25640) fix: allow for backward compatible errors (@eschutho) +- [#25741](https://github.com/apache/superset/pull/25741) fix(sqllab): slow pop datasource query (@justinpark) +- [#25756](https://github.com/apache/superset/pull/25756) fix: dataset update uniqueness (@betodealmeida) +- [#25753](https://github.com/apache/superset/pull/25753) fix: Revert "fix(Charts): Set max row limit + removed the option to use an empty row limit value" (@geido) +- [#25732](https://github.com/apache/superset/pull/25732) fix(horizontal filter label): show full tooltip with ellipsis (@rtexelm) +- [#25712](https://github.com/apache/superset/pull/25712) fix: bump to FAB 4.3.9 remove CSP exception (@dpgaspar) +- [#24709](https://github.com/apache/superset/pull/24709) fix(chore): dashboard requests to database equal the number of slices it has (@Always-prog) +- [#25679](https://github.com/apache/superset/pull/25679) fix: remove unnecessary redirect (@Khrol) +- [#25680](https://github.com/apache/superset/pull/25680) fix(sqllab): reinstate "Force trino client async execution" (@giftig) +- [#25657](https://github.com/apache/superset/pull/25657) fix(dremio): Fixes issue with Dremio SQL generation for Charts with Series Limit (@OskarNS) +- [#23638](https://github.com/apache/superset/pull/23638) fix: warning of nth-child (@justinpark) +- [#25658](https://github.com/apache/superset/pull/25658) fix: improve upload ZIP file validation (@dpgaspar) +- [#25495](https://github.com/apache/superset/pull/25495) fix(header navlinks): link navlinks to path prefix (@fisjac) +- [#25112](https://github.com/apache/superset/pull/25112) fix: permalink save/overwrites in explore (@hughhhh) +- [#25493](https://github.com/apache/superset/pull/25493) fix(import): Make sure query context is overwritten for overwriting imports (@jfrag1) +- [#25553](https://github.com/apache/superset/pull/25553) fix: avoid 500 errors with SQLLAB_BACKEND_PERSISTENCE (@Khrol) +- [#25626](https://github.com/apache/superset/pull/25626) fix(sqllab): template validation error within comments (@justinpark) +- [#25523](https://github.com/apache/superset/pull/25523) fix(sqllab): Mistitled for new tab after rename (@justinpark) + +**Others** + +- [#25995](https://github.com/apache/superset/pull/25995) chore: Optimize fetching samples logic (@john-bodley) +- [#23619](https://github.com/apache/superset/pull/23619) chore(colors): Updating Airbnb brand colors (@john-bodley) + ### 3.0.1 (Tue Oct 13 10:32:21 2023 -0700) **Database Migrations** diff --git a/docs/docs/databases/installing-database-drivers.mdx b/docs/docs/databases/installing-database-drivers.mdx index e4e972f0648b..57652db4b8cb 100644 --- a/docs/docs/databases/installing-database-drivers.mdx +++ 
b/docs/docs/databases/installing-database-drivers.mdx @@ -22,46 +22,47 @@ as well as the packages needed to connect to the databases you want to access th Some of the recommended packages are shown below. Please refer to [setup.py](https://github.com/apache/superset/blob/master/setup.py) for the versions that are compatible with Superset. -| Database | PyPI package | Connection String | -| --------------------------------------------------------- | ---------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------- | -| [Amazon Athena](/docs/databases/athena) | `pip install pyathena[pandas]` , `pip install PyAthenaJDBC` | `awsathena+rest://{aws_access_key_id}:{aws_secret_access_key}@athena.{region_name}.amazonaws.com/{ ` | -| [Amazon DynamoDB](/docs/databases/dynamodb) | `pip install pydynamodb` | `dynamodb://{access_key_id}:{secret_access_key}@dynamodb.{region_name}.amazonaws.com?connector=superset` | -| [Amazon Redshift](/docs/databases/redshift) | `pip install sqlalchemy-redshift` | ` redshift+psycopg2://:@:5439/` | -| [Apache Drill](/docs/databases/drill) | `pip install sqlalchemy-drill` | `drill+sadrill:// For JDBC drill+jdbc://` | -| [Apache Druid](/docs/databases/druid) | `pip install pydruid` | `druid://:@:/druid/v2/sql` | -| [Apache Hive](/docs/databases/hive) | `pip install pyhive` | `hive://hive@{hostname}:{port}/{database}` | -| [Apache Impala](/docs/databases/impala) | `pip install impyla` | `impala://{hostname}:{port}/{database}` | -| [Apache Kylin](/docs/databases/kylin) | `pip install kylinpy` | `kylin://:@:/?=&=` | -| [Apache Pinot](/docs/databases/pinot) | `pip install pinotdb` | `pinot://BROKER:5436/query?server=http://CONTROLLER:5983/` | -| [Apache Solr](/docs/databases/solr) | `pip install sqlalchemy-solr` | `solr://{username}:{password}@{hostname}:{port}/{server_path}/{collection}` | -| [Apache Spark SQL](/docs/databases/spark-sql) | `pip install pyhive` | `hive://hive@{hostname}:{port}/{database}` | -| [Ascend.io](/docs/databases/ascend) | `pip install impyla` | `ascend://{username}:{password}@{hostname}:{port}/{database}?auth_mechanism=PLAIN;use_ssl=true` | -| [Azure MS SQL](/docs/databases/sql-server) | `pip install pymssql` | `mssql+pymssql://UserName@presetSQL:TestPassword@presetSQL.database.windows.net:1433/TestSchema` | -| [Big Query](/docs/databases/bigquery) | `pip install sqlalchemy-bigquery` | `bigquery://{project_id}` | -| [ClickHouse](/docs/databases/clickhouse) | `pip install clickhouse-connect` | `clickhousedb://{username}:{password}@{hostname}:{port}/{database}` | -| [CockroachDB](/docs/databases/cockroachdb) | `pip install cockroachdb` | `cockroachdb://root@{hostname}:{port}/{database}?sslmode=disable` | -| [Dremio](/docs/databases/dremio) | `pip install sqlalchemy_dremio` | `dremio://user:pwd@host:31010/` | -| [Elasticsearch](/docs/databases/elasticsearch) | `pip install elasticsearch-dbapi` | `elasticsearch+http://{user}:{password}@{host}:9200/` | -| [Exasol](/docs/databases/exasol) | `pip install sqlalchemy-exasol` | `exa+pyodbc://{username}:{password}@{hostname}:{port}/my_schema?CONNECTIONLCALL=en_US.UTF-8&driver=EXAODBC` | -| [Google Sheets](/docs/databases/google-sheets) | `pip install shillelagh[gsheetsapi]` | `gsheets://` | -| [Firebolt](/docs/databases/firebolt) | `pip install firebolt-sqlalchemy` | `firebolt://{username}:{password}@{database} or firebolt://{username}:{password}@{database}/{engine_name}` | -| 
[Hologres](/docs/databases/hologres) | `pip install psycopg2` | `postgresql+psycopg2://:@/` | -| [IBM Db2](/docs/databases/ibm-db2) | `pip install ibm_db_sa` | `db2+ibm_db://` | -| [IBM Netezza Performance Server](/docs/databases/netezza) | `pip install nzalchemy` | `netezza+nzpy://:@/` | -| [MySQL](/docs/databases/mysql) | `pip install mysqlclient` | `mysql://:@/` | -| [Oracle](/docs/databases/oracle) | `pip install cx_Oracle` | `oracle://` | -| [PostgreSQL](/docs/databases/postgres) | `pip install psycopg2` | `postgresql://:@/` | -| [Trino](/docs/databases/trino) | `pip install trino` | `trino://{username}:{password}@{hostname}:{port}/{catalog}` | -| [Presto](/docs/databases/presto) | `pip install pyhive` | `presto://` | -| [SAP Hana](/docs/databases/hana) | `pip install hdbcli sqlalchemy-hana or pip install apache-superset[hana]` | `hana://{username}:{password}@{host}:{port}` | -| [StarRocks](/docs/databases/starrocks) | `pip install starrocks` | `starrocks://:@:/.` | -| [Snowflake](/docs/databases/snowflake) | `pip install snowflake-sqlalchemy` | `snowflake://{user}:{password}@{account}.{region}/{database}?role={role}&warehouse={warehouse}` | -| SQLite | No additional library needed | `sqlite://` | -| [SQL Server](/docs/databases/sql-server) | `pip install pymssql` | `mssql+pymssql://` | -| [Teradata](/docs/databases/teradata) | `pip install teradatasqlalchemy` | `teradatasql://{user}:{password}@{host}` | -| [TimescaleDB](/docs/databases/timescaledb) | `pip install psycopg2` | `postgresql://:@:/` | -| [Vertica](/docs/databases/vertica) | `pip install sqlalchemy-vertica-python` | `vertica+vertica_python://:@/` | -| [YugabyteDB](/docs/databases/yugabytedb) | `pip install psycopg2` | `postgresql://:@/` | +| Database | PyPI package | Connection String | +| --------------------------------------------------------- | ------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------- | +| [Amazon Athena](/docs/databases/athena) | `pip install pyathena[pandas]` , `pip install PyAthenaJDBC` | `awsathena+rest://{aws_access_key_id}:{aws_secret_access_key}@athena.{region_name}.amazonaws.com/{ ` | +| [Amazon DynamoDB](/docs/databases/dynamodb) | `pip install pydynamodb` | `dynamodb://{access_key_id}:{secret_access_key}@dynamodb.{region_name}.amazonaws.com?connector=superset` | +| [Amazon Redshift](/docs/databases/redshift) | `pip install sqlalchemy-redshift` | ` redshift+psycopg2://:@:5439/` | +| [Apache Drill](/docs/databases/drill) | `pip install sqlalchemy-drill` | `drill+sadrill:// For JDBC drill+jdbc://` | +| [Apache Druid](/docs/databases/druid) | `pip install pydruid` | `druid://:@:/druid/v2/sql` | +| [Apache Hive](/docs/databases/hive) | `pip install pyhive` | `hive://hive@{hostname}:{port}/{database}` | +| [Apache Impala](/docs/databases/impala) | `pip install impyla` | `impala://{hostname}:{port}/{database}` | +| [Apache Kylin](/docs/databases/kylin) | `pip install kylinpy` | `kylin://:@:/?=&=` | +| [Apache Pinot](/docs/databases/pinot) | `pip install pinotdb` | `pinot://BROKER:5436/query?server=http://CONTROLLER:5983/` | +| [Apache Solr](/docs/databases/solr) | `pip install sqlalchemy-solr` | `solr://{username}:{password}@{hostname}:{port}/{server_path}/{collection}` | +| [Apache Spark SQL](/docs/databases/spark-sql) | `pip install pyhive` | `hive://hive@{hostname}:{port}/{database}` | +| [Ascend.io](/docs/databases/ascend) | `pip install impyla` | 
`ascend://{username}:{password}@{hostname}:{port}/{database}?auth_mechanism=PLAIN;use_ssl=true` | +| [Azure MS SQL](/docs/databases/sql-server) | `pip install pymssql` | `mssql+pymssql://UserName@presetSQL:TestPassword@presetSQL.database.windows.net:1433/TestSchema` | +| [Big Query](/docs/databases/bigquery) | `pip install sqlalchemy-bigquery` | `bigquery://{project_id}` | +| [ClickHouse](/docs/databases/clickhouse) | `pip install clickhouse-connect` | `clickhousedb://{username}:{password}@{hostname}:{port}/{database}` | +| [CockroachDB](/docs/databases/cockroachdb) | `pip install cockroachdb` | `cockroachdb://root@{hostname}:{port}/{database}?sslmode=disable` | +| [Dremio](/docs/databases/dremio) | `pip install sqlalchemy_dremio` | `dremio://user:pwd@host:31010/` | +| [Elasticsearch](/docs/databases/elasticsearch) | `pip install elasticsearch-dbapi` | `elasticsearch+http://{user}:{password}@{host}:9200/` | +| [Exasol](/docs/databases/exasol) | `pip install sqlalchemy-exasol` | `exa+pyodbc://{username}:{password}@{hostname}:{port}/my_schema?CONNECTIONLCALL=en_US.UTF-8&driver=EXAODBC` | +| [Google Sheets](/docs/databases/google-sheets) | `pip install shillelagh[gsheetsapi]` | `gsheets://` | +| [Firebolt](/docs/databases/firebolt) | `pip install firebolt-sqlalchemy` | `firebolt://{username}:{password}@{database} or firebolt://{username}:{password}@{database}/{engine_name}` | +| [Hologres](/docs/databases/hologres) | `pip install psycopg2` | `postgresql+psycopg2://:@/` | +| [IBM Db2](/docs/databases/ibm-db2) | `pip install ibm_db_sa` | `db2+ibm_db://` | +| [IBM Netezza Performance Server](/docs/databases/netezza) | `pip install nzalchemy` | `netezza+nzpy://:@/` | +| [MySQL](/docs/databases/mysql) | `pip install mysqlclient` | `mysql://:@/` | +| [Oracle](/docs/databases/oracle) | `pip install cx_Oracle` | `oracle://` | +| [PostgreSQL](/docs/databases/postgres) | `pip install psycopg2` | `postgresql://:@/` | +| [Trino](/docs/databases/trino) | `pip install trino` | `trino://{username}:{password}@{hostname}:{port}/{catalog}` | +| [Presto](/docs/databases/presto) | `pip install pyhive` | `presto://` | +| [SAP Hana](/docs/databases/hana) | `pip install hdbcli sqlalchemy-hana or pip install apache-superset[hana]` | `hana://{username}:{password}@{host}:{port}` | +| [StarRocks](/docs/databases/starrocks) | `pip install starrocks` | `starrocks://:@:/.` | +| [Snowflake](/docs/databases/snowflake) | `pip install snowflake-sqlalchemy` | `snowflake://{user}:{password}@{account}.{region}/{database}?role={role}&warehouse={warehouse}` | +| SQLite | No additional library needed | `sqlite://path/to/file.db?check_same_thread=false` | +| [SQL Server](/docs/databases/sql-server) | `pip install pymssql` | `mssql+pymssql://` | +| [Teradata](/docs/databases/teradata) | `pip install teradatasqlalchemy` | `teradatasql://{user}:{password}@{host}` | +| [TimescaleDB](/docs/databases/timescaledb) | `pip install psycopg2` | `postgresql://:@:/` | +| [Vertica](/docs/databases/vertica) | `pip install sqlalchemy-vertica-python` | `vertica+vertica_python://:@/` | +| [YugabyteDB](/docs/databases/yugabytedb) | `pip install psycopg2` | `postgresql://:@/` | + --- Note that many other databases are supported, the main criteria being the existence of a functional diff --git a/docs/docs/frequently-asked-questions.mdx b/docs/docs/frequently-asked-questions.mdx index bbb94d617b98..79a0863b088d 100644 --- a/docs/docs/frequently-asked-questions.mdx +++ b/docs/docs/frequently-asked-questions.mdx @@ -168,7 +168,7 @@ Another workaround is to 
change where superset stores the sqlite database by add `superset_config.py`: ``` -SQLALCHEMY_DATABASE_URI = 'sqlite:////new/location/superset.db' +SQLALCHEMY_DATABASE_URI = 'sqlite:////new/location/superset.db?check_same_thread=false' ``` You can read more about customizing Superset using the configuration file diff --git a/docs/docs/installation/configuring-superset.mdx b/docs/docs/installation/configuring-superset.mdx index 9cb3aaefacc7..c6108d6f59c8 100644 --- a/docs/docs/installation/configuring-superset.mdx +++ b/docs/docs/installation/configuring-superset.mdx @@ -32,7 +32,9 @@ SECRET_KEY = 'YOUR_OWN_RANDOM_GENERATED_SECRET_KEY' # superset metadata (slices, connections, tables, dashboards, ...). # Note that the connection information to connect to the datasources # you want to explore are managed directly in the web UI -SQLALCHEMY_DATABASE_URI = 'sqlite:////path/to/superset.db' +# The check_same_thread=false property ensures the sqlite client does not attempt +# to enforce single-threaded access, which may be problematic in some edge cases +SQLALCHEMY_DATABASE_URI = 'sqlite:////path/to/superset.db?check_same_thread=false' # Flask-WTF flag for CSRF WTF_CSRF_ENABLED = True diff --git a/helm/superset/Chart.yaml b/helm/superset/Chart.yaml index 2aa2bc49a317..83f4119791ab 100644 --- a/helm/superset/Chart.yaml +++ b/helm/superset/Chart.yaml @@ -15,7 +15,7 @@ # limitations under the License. # apiVersion: v2 -appVersion: "3.0.0" +appVersion: "3.0.2" description: Apache Superset is a modern, enterprise-ready business intelligence web application name: superset icon: https://artifacthub.io/image/68c1d717-0e97-491f-b046-754e46f46922@2x @@ -29,7 +29,7 @@ maintainers: - name: craig-rueda email: craig@craigrueda.com url: https://github.com/craig-rueda -version: 0.10.9 +version: 0.10.15 dependencies: - name: postgresql version: 12.1.6 diff --git a/helm/superset/README.md b/helm/superset/README.md index 38c69c38b524..b8d438500895 100644 --- a/helm/superset/README.md +++ b/helm/superset/README.md @@ -23,7 +23,7 @@ NOTE: This file is generated by helm-docs: https://github.com/norwoodj/helm-docs # superset -![Version: 0.10.9](https://img.shields.io/badge/Version-0.10.9-informational?style=flat-square) +![Version: 0.10.15](https://img.shields.io/badge/Version-0.10.15-informational?style=flat-square) Apache Superset is a modern, enterprise-ready business intelligence web application diff --git a/helm/superset/templates/deployment-beat.yaml b/helm/superset/templates/deployment-beat.yaml index 43754efb0614..eab9a6f3eb4f 100644 --- a/helm/superset/templates/deployment-beat.yaml +++ b/helm/superset/templates/deployment-beat.yaml @@ -42,6 +42,7 @@ spec: metadata: annotations: checksum/superset_config.py: {{ include "superset-config" . | sha256sum }} + checksum/superset_bootstrap.sh: {{ tpl .Values.bootstrapScript . | sha256sum }} checksum/connections: {{ .Values.supersetNode.connections | toYaml | sha256sum }} checksum/extraConfigs: {{ .Values.extraConfigs | toYaml | sha256sum }} checksum/extraSecrets: {{ .Values.extraSecrets | toYaml | sha256sum }} diff --git a/helm/superset/templates/deployment-worker.yaml b/helm/superset/templates/deployment-worker.yaml index 7f2bcf8df3cd..be710b723a04 100644 --- a/helm/superset/templates/deployment-worker.yaml +++ b/helm/superset/templates/deployment-worker.yaml @@ -46,6 +46,7 @@ spec: metadata: annotations: checksum/superset_config.py: {{ include "superset-config" . | sha256sum }} + checksum/superset_bootstrap.sh: {{ tpl .Values.bootstrapScript . 
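For context on the `check_same_thread=false` changes above: SQLAlchemy's SQLite dialect forwards boolean query-string arguments to the underlying `sqlite3.connect()` call, so the URI form and the explicit `connect_args` form behave the same. A minimal standalone sketch (not part of this diff; the database path is illustrative):

```
from sqlalchemy import create_engine, text

# URI form: the sqlite dialect parses check_same_thread=false from the
# query string and passes check_same_thread=False to sqlite3.connect()
engine = create_engine("sqlite:////path/to/superset.db?check_same_thread=false")

# Equivalent explicit form
engine = create_engine(
    "sqlite:////path/to/superset.db",
    connect_args={"check_same_thread": False},
)

with engine.connect() as conn:
    assert conn.execute(text("SELECT 1")).scalar() == 1
```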
| sha256sum }} checksum/connections: {{ .Values.supersetNode.connections | toYaml | sha256sum }} checksum/extraConfigs: {{ .Values.extraConfigs | toYaml | sha256sum }} checksum/extraSecrets: {{ .Values.extraSecrets | toYaml | sha256sum }} diff --git a/requirements/base.txt b/requirements/base.txt index d6ee2e6a6b9e..1d2d568efcd9 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -27,8 +27,9 @@ billiard==3.6.4.0 # via celery brotli==1.0.9 # via flask-compress -cachelib==0.6.0 - # via flask-caching +cachelib==0.9.0 + # via + # flask-caching celery==5.2.2 # via apache-superset cffi==1.15.1 @@ -88,11 +89,11 @@ flask==2.2.5 # flask-migrate # flask-sqlalchemy # flask-wtf -flask-appbuilder==4.3.7 +flask-appbuilder==4.3.10 # via apache-superset flask-babel==1.0.0 # via flask-appbuilder -flask-caching==1.11.1 +flask-caching==2.1.0 # via apache-superset flask-compress==1.13 # via apache-superset diff --git a/setup.py b/setup.py index 3cb0c144b2f5..1bd979c10740 100644 --- a/setup.py +++ b/setup.py @@ -80,8 +80,8 @@ def get_git_sha() -> str: "cryptography>=39.0.1, <40", "deprecation>=2.1.0, <2.2.0", "flask>=2.2.5, <3.0.0", - "flask-appbuilder>=4.3.7, <5.0.0", - "flask-caching>=1.11.1, <2.0", + "flask-appbuilder>=4.3.10, <5.0.0", + "flask-caching>=2.1.0, <3", "flask-compress>=1.13, <2.0", "flask-talisman>=1.0.0, <2.0", "flask-login>=0.6.0, < 1.0", diff --git a/superset-frontend/cypress-base/cypress/e2e/dashboard/editmode.test.ts b/superset-frontend/cypress-base/cypress/e2e/dashboard/editmode.test.ts index b35105a7b591..e4d645bd2e08 100644 --- a/superset-frontend/cypress-base/cypress/e2e/dashboard/editmode.test.ts +++ b/superset-frontend/cypress-base/cypress/e2e/dashboard/editmode.test.ts @@ -515,7 +515,7 @@ describe('Dashboard edit', () => { // label Anthony cy.get('[data-test-chart-name="Trends"] .line .nv-legend-symbol') .eq(2) - .should('have.css', 'fill', 'rgb(0, 122, 135)'); + .should('have.css', 'fill', 'rgb(244, 176, 42)'); // open main tab and nested tab openTab(0, 0); @@ -526,7 +526,7 @@ describe('Dashboard edit', () => { '[data-test-chart-name="Top 10 California Names Timeseries"] .line .nv-legend-symbol', ) .first() - .should('have.css', 'fill', 'rgb(0, 122, 135)'); + .should('have.css', 'fill', 'rgb(244, 176, 42)'); }); it('should apply the color scheme across main tabs', () => { @@ -557,7 +557,7 @@ describe('Dashboard edit', () => { cy.get('[data-test-chart-name="Trends"] .line .nv-legend-symbol') .first() - .should('have.css', 'fill', 'rgb(204, 0, 134)'); + .should('have.css', 'fill', 'rgb(156, 52, 152)'); // change scheme now that charts are rendered across the main tabs editDashboard(); diff --git a/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/dist_bar.test.js b/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/dist_bar.test.js index 770e1e1c04d3..591ba3177693 100644 --- a/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/dist_bar.test.js +++ b/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/dist_bar.test.js @@ -89,6 +89,6 @@ describe('Visualization > Distribution bar chart', () => { ).should('exist'); cy.get('.dist_bar .nv-legend .nv-legend-symbol') .first() - .should('have.css', 'fill', 'rgb(255, 90, 95)'); + .should('have.css', 'fill', 'rgb(41, 105, 107)'); }); }); diff --git a/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/line.test.ts b/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/line.test.ts index 5cc398c7f3ef..8499db594681 100644 --- 
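On the `flask-caching` 1.x to 2.1.0 and `cachelib` 0.9 bumps above: flask-caching 2.x expects the class-style backend names (for example `RedisCache` rather than the old lowercase `redis`). A hedged sketch of a compatible cache section for `superset_config.py`, assuming a local Redis instance (the URL and key prefixes are illustrative):

```
# superset_config.py
CACHE_CONFIG = {
    "CACHE_TYPE": "RedisCache",    # class-style name used by flask-caching 2.x
    "CACHE_DEFAULT_TIMEOUT": 300,  # seconds
    "CACHE_KEY_PREFIX": "superset_",
    "CACHE_REDIS_URL": "redis://localhost:6379/0",
}

# The same shape can be reused for Superset's other cache configs
DATA_CACHE_CONFIG = {**CACHE_CONFIG, "CACHE_KEY_PREFIX": "superset_data_"}
FILTER_STATE_CACHE_CONFIG = {**CACHE_CONFIG, "CACHE_KEY_PREFIX": "superset_filter_"}
```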
a/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/line.test.ts +++ b/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/line.test.ts @@ -85,7 +85,7 @@ describe('Visualization > Line', () => { ).should('exist'); cy.get('.line .nv-legend .nv-legend-symbol') .first() - .should('have.css', 'fill', 'rgb(255, 90, 95)'); + .should('have.css', 'fill', 'rgb(41, 105, 107)'); }); it('should work with adhoc metric', () => { diff --git a/superset-frontend/package.json b/superset-frontend/package.json index b21ce1b6d7c8..38a8e332c9bc 100644 --- a/superset-frontend/package.json +++ b/superset-frontend/package.json @@ -1,6 +1,6 @@ { "name": "superset", - "version": "3.0.1", + "version": "3.0.2", "description": "Superset is a data exploration platform designed to be visual, intuitive, and interactive.", "keywords": [ "big", diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/sharedControls.tsx b/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/sharedControls.tsx index 69fa8a686490..abf5153bb0d5 100644 --- a/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/sharedControls.tsx +++ b/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/sharedControls.tsx @@ -47,8 +47,6 @@ import { isDefined, hasGenericChartAxes, NO_TIME_RANGE, - validateNonEmpty, - validateMaxValue, } from '@superset-ui/core'; import { @@ -247,12 +245,7 @@ const row_limit: SharedControlConfig<'SelectControl'> = { type: 'SelectControl', freeForm: true, label: t('Row limit'), - clearable: false, - validators: [ - validateNonEmpty, - legacyValidateInteger, - v => validateMaxValue(v, 100000), - ], + validators: [legacyValidateInteger], default: 10000, choices: formatSelectOptions(ROW_LIMIT_OPTIONS), description: t('Limits the number of rows that get displayed.'), diff --git a/superset-frontend/packages/superset-ui-core/src/chart/types/Base.ts b/superset-frontend/packages/superset-ui-core/src/chart/types/Base.ts index 1c4d278f6cc4..b3884a848801 100644 --- a/superset-frontend/packages/superset-ui-core/src/chart/types/Base.ts +++ b/superset-frontend/packages/superset-ui-core/src/chart/types/Base.ts @@ -58,7 +58,6 @@ export enum AppSection { export type FilterState = { value?: any; [key: string]: any }; export type DataMask = { - __cache?: FilterState; extraFormData?: ExtraFormData; filterState?: FilterState; ownState?: JsonObject; diff --git a/superset-frontend/packages/superset-ui-core/src/color/colorSchemes/categorical/airbnb.ts b/superset-frontend/packages/superset-ui-core/src/color/colorSchemes/categorical/airbnb.ts index 462065b84f2b..a126f502a9c3 100644 --- a/superset-frontend/packages/superset-ui-core/src/color/colorSchemes/categorical/airbnb.ts +++ b/superset-frontend/packages/superset-ui-core/src/color/colorSchemes/categorical/airbnb.ts @@ -24,27 +24,19 @@ const schemes = [ id: 'bnbColors', label: 'Airbnb Colors', colors: [ - '#ff5a5f', // rausch - '#7b0051', // hackb - '#007A87', // kazan - '#00d1c1', // babu - '#8ce071', // lima - '#ffb400', // beach - '#b4a76c', // barol - '#ff8083', - '#cc0086', - '#00a1b3', - '#00ffeb', - '#bbedab', - '#ffd266', - '#cbc29a', - '#ff3339', - '#ff1ab1', - '#005c66', - '#00b3a5', - '#55d12e', - '#b37e00', - '#988b4e', + '#29696B', + '#5BCACE', + '#F4B02A', + '#F1826A', + '#792EB2', + '#C96EC6', + '#921E50', + '#B27700', + '#9C3498', + '#9C3498', + '#E4679D', + '#C32F0E', + '#9D63CA', ], }, ].map(s => new CategoricalScheme(s)); diff --git 
a/superset-frontend/packages/superset-ui-core/src/query/normalizeTimeColumn.ts b/superset-frontend/packages/superset-ui-core/src/query/normalizeTimeColumn.ts index f7ea0d0e6006..5fb9590c685a 100644 --- a/superset-frontend/packages/superset-ui-core/src/query/normalizeTimeColumn.ts +++ b/superset-frontend/packages/superset-ui-core/src/query/normalizeTimeColumn.ts @@ -70,10 +70,7 @@ export function normalizeTimeColumn( }; } - const newQueryObject = omit(queryObject, [ - 'extras.time_grain_sqla', - 'is_timeseries', - ]); + const newQueryObject = omit(queryObject, ['is_timeseries']); newQueryObject.columns = mutatedColumns; return newQueryObject; diff --git a/superset-frontend/packages/superset-ui-core/src/utils/html.test.tsx b/superset-frontend/packages/superset-ui-core/src/utils/html.test.tsx index 8fd06cb6f8e7..9b950e4246e9 100644 --- a/superset-frontend/packages/superset-ui-core/src/utils/html.test.tsx +++ b/superset-frontend/packages/superset-ui-core/src/utils/html.test.tsx @@ -44,6 +44,9 @@ describe('isProbablyHTML', () => { const plainText = 'Just a plain text'; const isHTML = isProbablyHTML(plainText); expect(isHTML).toBe(false); + + const trickyText = 'a <= 10 and b > 10'; + expect(isProbablyHTML(trickyText)).toBe(false); }); }); diff --git a/superset-frontend/packages/superset-ui-core/src/utils/html.tsx b/superset-frontend/packages/superset-ui-core/src/utils/html.tsx index 3215eb9b9de5..fffd43bda8f6 100644 --- a/superset-frontend/packages/superset-ui-core/src/utils/html.tsx +++ b/superset-frontend/packages/superset-ui-core/src/utils/html.tsx @@ -28,7 +28,9 @@ export function sanitizeHtml(htmlString: string) { } export function isProbablyHTML(text: string) { - return /<[^>]+>/.test(text); + return Array.from( + new DOMParser().parseFromString(text, 'text/html').body.childNodes, + ).some(({ nodeType }) => nodeType === 1); } export function sanitizeHtmlIfNeeded(htmlString: string) { diff --git a/superset-frontend/packages/superset-ui-core/src/validator/index.ts b/superset-frontend/packages/superset-ui-core/src/validator/index.ts index fb37328c0229..532efcc95911 100644 --- a/superset-frontend/packages/superset-ui-core/src/validator/index.ts +++ b/superset-frontend/packages/superset-ui-core/src/validator/index.ts @@ -22,4 +22,3 @@ export { default as legacyValidateNumber } from './legacyValidateNumber'; export { default as validateInteger } from './validateInteger'; export { default as validateNumber } from './validateNumber'; export { default as validateNonEmpty } from './validateNonEmpty'; -export { default as validateMaxValue } from './validateMaxValue'; diff --git a/superset-frontend/packages/superset-ui-core/src/validator/validateMaxValue.ts b/superset-frontend/packages/superset-ui-core/src/validator/validateMaxValue.ts deleted file mode 100644 index 24c1da1c79dd..000000000000 --- a/superset-frontend/packages/superset-ui-core/src/validator/validateMaxValue.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { t } from '../translation'; - -export default function validateMaxValue(v: unknown, max: Number) { - if (Number(v) > +max) { - return t('Value cannot exceed %s', max); - } - return false; -} diff --git a/superset-frontend/packages/superset-ui-core/test/query/normalizeTimeColumn.test.ts b/superset-frontend/packages/superset-ui-core/test/query/normalizeTimeColumn.test.ts index 22189b90551c..2f6bdd0fb507 100644 --- a/superset-frontend/packages/superset-ui-core/test/query/normalizeTimeColumn.test.ts +++ b/superset-frontend/packages/superset-ui-core/test/query/normalizeTimeColumn.test.ts @@ -92,7 
+92,9 @@ describe('GENERIC_CHART_AXES is disabled', () => { datasource: '5__table', viz_type: 'table', granularity: 'time_column', - extras: {}, + extras: { + time_grain_sqla: 'P1Y', + }, time_range: '1 year ago : 2013', orderby: [['count(*)', true]], columns: [ @@ -182,7 +184,7 @@ describe('GENERIC_CHART_AXES is enabled', () => { datasource: '5__table', viz_type: 'table', granularity: 'time_column', - extras: { where: '', having: '' }, + extras: { where: '', having: '', time_grain_sqla: 'P1Y' }, time_range: '1 year ago : 2013', orderby: [['count(*)', true]], columns: [ @@ -240,7 +242,7 @@ describe('GENERIC_CHART_AXES is enabled', () => { datasource: '5__table', viz_type: 'table', granularity: 'time_column', - extras: { where: '', having: '' }, + extras: { where: '', having: '', time_grain_sqla: 'P1Y' }, time_range: '1 year ago : 2013', orderby: [['count(*)', true]], columns: [ diff --git a/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-plugin-chart-map-box/Stories.tsx b/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-plugin-chart-map-box/Stories.tsx index 6cdca623a1c8..dd95ffada5b0 100644 --- a/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-plugin-chart-map-box/Stories.tsx +++ b/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-plugin-chart-map-box/Stories.tsx @@ -42,7 +42,7 @@ export const Basic = () => { allColumnsY: 'LAT', clusteringRadius: '60', globalOpacity: 1, - mapboxColor: 'rgb(0, 122, 135)', + mapboxColor: 'rgb(244, 176, 42)', mapboxLabel: [], mapboxStyle: 'mapbox://styles/mapbox/light-v9', pandasAggfunc: 'sum', diff --git a/superset-frontend/plugins/plugin-chart-echarts/test/MixedTimeseries/buildQuery.test.ts b/superset-frontend/plugins/plugin-chart-echarts/test/MixedTimeseries/buildQuery.test.ts index 15d9e2cbb6e1..7aef4f582a32 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/test/MixedTimeseries/buildQuery.test.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/test/MixedTimeseries/buildQuery.test.ts @@ -307,6 +307,7 @@ test('should convert a queryObject with x-axis although FF is disabled', () => { extras: { having: '', where: "(foo in ('a', 'b'))", + time_grain_sqla: 'P1W', }, applied_time_extras: {}, columns: [ diff --git a/superset-frontend/plugins/plugin-chart-echarts/test/Timeseries/buildQuery.test.ts b/superset-frontend/plugins/plugin-chart-echarts/test/Timeseries/buildQuery.test.ts index 611f021af16f..8887342933ca 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/test/Timeseries/buildQuery.test.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/test/Timeseries/buildQuery.test.ts @@ -120,7 +120,7 @@ describe('GENERIC_CHART_AXES is enabled', () => { expect.objectContaining({ granularity: 'time_column', time_range: '1 year ago : 2013', - extras: { having: '', where: '' }, + extras: { having: '', where: '', time_grain_sqla: 'P1Y' }, columns: [ { columnType: 'BASE_AXIS', @@ -209,7 +209,7 @@ describe('GENERIC_CHART_AXES is disabled', () => { expect.objectContaining({ granularity: 'time_column', time_range: '1 year ago : 2013', - extras: { having: '', where: '' }, + extras: { having: '', where: '', time_grain_sqla: 'P1Y' }, columns: [ { columnType: 'BASE_AXIS', diff --git a/superset-frontend/plugins/plugin-chart-pivot-table/src/plugin/buildQuery.ts b/superset-frontend/plugins/plugin-chart-pivot-table/src/plugin/buildQuery.ts index 55a6cefd083a..4cd314e06fdc 100644 --- 
a/superset-frontend/plugins/plugin-chart-pivot-table/src/plugin/buildQuery.ts +++ b/superset-frontend/plugins/plugin-chart-pivot-table/src/plugin/buildQuery.ts @@ -16,8 +16,6 @@ * specific language governing permissions and limitations * under the License. */ -import omit from 'lodash/omit'; - import { AdhocColumn, buildQueryContext, @@ -72,9 +70,7 @@ export default function buildQuery(formData: PivotTableQueryFormData) { } return [ { - ...(hasGenericChartAxes - ? omit(baseQueryObject, ['extras.time_grain_sqla']) - : baseQueryObject), + ...baseQueryObject, orderby: orderBy, columns, }, diff --git a/superset-frontend/plugins/plugin-chart-pivot-table/test/plugin/buildQuery.test.ts b/superset-frontend/plugins/plugin-chart-pivot-table/test/plugin/buildQuery.test.ts index 0e29de919652..bff468232ff6 100644 --- a/superset-frontend/plugins/plugin-chart-pivot-table/test/plugin/buildQuery.test.ts +++ b/superset-frontend/plugins/plugin-chart-pivot-table/test/plugin/buildQuery.test.ts @@ -121,3 +121,16 @@ test('should fallback to formData.time_grain_sqla if extra_form_data.time_grain_ expressionType: 'SQL', }); }); + +test('should not omit extras.time_grain_sqla from queryContext so dashboards apply them', () => { + Object.defineProperty(supersetCoreModule, 'hasGenericChartAxes', { + value: true, + }); + const modifiedFormData = { + ...formData, + extra_form_data: { time_grain_sqla: TimeGranularity.QUARTER }, + }; + const queryContext = buildQuery(modifiedFormData); + const [query] = queryContext.queries; + expect(query.extras?.time_grain_sqla).toEqual(TimeGranularity.QUARTER); +}); diff --git a/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx b/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx index 72bcb71d290f..917abb929a8e 100644 --- a/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx +++ b/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx @@ -409,7 +409,15 @@ export default function TableChart( const getColumnConfigs = useCallback( (column: DataColumnMeta, i: number): ColumnWithLooseAccessor => { - const { key, label, isNumeric, dataType, isMetric, config = {} } = column; + const { + key, + label, + isNumeric, + dataType, + isMetric, + isPercentMetric, + config = {}, + } = column; const columnWidth = Number.isNaN(Number(config.columnWidth)) ? config.columnWidth : Number(config.columnWidth); @@ -438,7 +446,7 @@ export default function TableChart( (config.showCellBars === undefined ? showCellBars : config.showCellBars) && - (isMetric || isRawRecords) && + (isMetric || isRawRecords || isPercentMetric) && getValueRange(key, alignPositiveNegative); let className = ''; diff --git a/superset-frontend/plugins/plugin-chart-table/src/transformProps.ts b/superset-frontend/plugins/plugin-chart-table/src/transformProps.ts index 3c9c00d3c165..0c8970737d86 100644 --- a/superset-frontend/plugins/plugin-chart-table/src/transformProps.ts +++ b/superset-frontend/plugins/plugin-chart-table/src/transformProps.ts @@ -118,9 +118,10 @@ const processColumns = memoizeOne(function processColumns( // because users can also add things like `MAX(str_col)` as a metric. const isMetric = metricsSet.has(key) && isNumeric(key, records); const isPercentMetric = percentMetricsSet.has(key); - const label = isPercentMetric - ? `%${verboseMap?.[key.replace('%', '')] || key}` - : verboseMap?.[key] || key; + const label = + isPercentMetric && verboseMap?.hasOwnProperty(key.replace('%', '')) + ? 
`%${verboseMap[key.replace('%', '')]}` + : verboseMap?.[key] || key; const isTime = dataType === GenericDataType.TEMPORAL; const isNumber = dataType === GenericDataType.NUMERIC; const savedFormat = columnFormats?.[key]; diff --git a/superset-frontend/plugins/plugin-chart-table/test/TableChart.test.tsx b/superset-frontend/plugins/plugin-chart-table/test/TableChart.test.tsx index 9bc3d90a09dd..d6998476baa2 100644 --- a/superset-frontend/plugins/plugin-chart-table/test/TableChart.test.tsx +++ b/superset-frontend/plugins/plugin-chart-table/test/TableChart.test.tsx @@ -76,6 +76,7 @@ describe('plugin-chart-table', () => { wrap = mount( , ); + tree = wrap.render(); // returns a CheerioWrapper with jQuery-like API const cells = tree.find('td'); expect(cells).toHaveLength(12); @@ -158,6 +159,7 @@ describe('plugin-chart-table', () => { }), ); const cells = document.querySelectorAll('td'); + expect(document.querySelectorAll('th')[0]).toHaveTextContent('num'); expect(cells[0]).toHaveTextContent('$ 1.23k'); expect(cells[1]).toHaveTextContent('$ 10k'); @@ -242,4 +244,61 @@ describe('plugin-chart-table', () => { expect(getComputedStyle(screen.getByText('N/A')).background).toBe(''); }); }); + + it('render cell bars properly, and only when it is toggled on in both regular and percent metrics', () => { + const props = transformProps({ + ...testData.raw, + rawFormData: { ...testData.raw.rawFormData }, + }); + + props.columns[0].isMetric = true; + + render( + ProviderWrapper({ + children: , + }), + ); + let cells = document.querySelectorAll('div.cell-bar'); + cells.forEach(cell => { + expect(cell).toHaveClass('positive'); + }); + props.columns[0].isMetric = false; + props.columns[0].isPercentMetric = true; + + render( + ProviderWrapper({ + children: , + }), + ); + cells = document.querySelectorAll('div.cell-bar'); + cells.forEach(cell => { + expect(cell).toHaveClass('positive'); + }); + + props.showCellBars = false; + + render( + ProviderWrapper({ + children: , + }), + ); + cells = document.querySelectorAll('td'); + + cells.forEach(cell => { + expect(cell).toHaveClass('test-c7w8t3'); + }); + + props.columns[0].isPercentMetric = false; + props.columns[0].isMetric = true; + + render( + ProviderWrapper({ + children: , + }), + ); + cells = document.querySelectorAll('td'); + cells.forEach(cell => { + expect(cell).toHaveClass('test-c7w8t3'); + }); + }); }); diff --git a/superset-frontend/src/SqlLab/actions/sqlLab.js b/superset-frontend/src/SqlLab/actions/sqlLab.js index fbfba6783e8e..d25689e9468f 100644 --- a/superset-frontend/src/SqlLab/actions/sqlLab.js +++ b/superset-frontend/src/SqlLab/actions/sqlLab.js @@ -1384,8 +1384,14 @@ export function popDatasourceQuery(datasourceKey, sql) { return function (dispatch) { const QUERY_TEXT = t('Query'); const datasetId = datasourceKey.split('__')[0]; + + const queryParams = rison.encode({ + keys: ['none'], + columns: ['name', 'schema', 'database.id', 'select_star'], + }); + return SupersetClient.get({ - endpoint: `/api/v1/dataset/${datasetId}?q=(keys:!(none))`, + endpoint: `/api/v1/dataset/${datasetId}?q=${queryParams}`, }) .then(({ json }) => dispatch( diff --git a/superset-frontend/src/SqlLab/components/ResultSet/ResultSet.test.tsx b/superset-frontend/src/SqlLab/components/ResultSet/ResultSet.test.tsx index 5e2a0455b550..d823c586f799 100644 --- a/superset-frontend/src/SqlLab/components/ResultSet/ResultSet.test.tsx +++ b/superset-frontend/src/SqlLab/components/ResultSet/ResultSet.test.tsx @@ -95,7 +95,7 @@ const asyncRefetchResultsTableProps = { resultsKey: 'async results 
key', }, }; -fetchMock.get('glob:*/api/v1/dataset?*', { result: [] }); +fetchMock.get('glob:*/api/v1/dataset/?*', { result: [] }); const middlewares = [thunk]; const mockStore = configureStore(middlewares); diff --git a/superset-frontend/src/SqlLab/components/SaveDatasetModal/SaveDatasetModal.test.tsx b/superset-frontend/src/SqlLab/components/SaveDatasetModal/SaveDatasetModal.test.tsx index 4cac5c620464..8568bf20809e 100644 --- a/superset-frontend/src/SqlLab/components/SaveDatasetModal/SaveDatasetModal.test.tsx +++ b/superset-frontend/src/SqlLab/components/SaveDatasetModal/SaveDatasetModal.test.tsx @@ -39,7 +39,7 @@ const mockedProps = { datasource: testQuery, }; -fetchMock.get('glob:*/api/v1/dataset?*', { +fetchMock.get('glob:*/api/v1/dataset/?*', { result: mockdatasets, dataset_count: 3, }); diff --git a/superset-frontend/src/SqlLab/components/SaveDatasetModal/index.tsx b/superset-frontend/src/SqlLab/components/SaveDatasetModal/index.tsx index eba873c83b4b..1932798138ab 100644 --- a/superset-frontend/src/SqlLab/components/SaveDatasetModal/index.tsx +++ b/superset-frontend/src/SqlLab/components/SaveDatasetModal/index.tsx @@ -257,7 +257,7 @@ export const SaveDatasetModal = ({ }); return SupersetClient.get({ - endpoint: `/api/v1/dataset?q=${queryParams}`, + endpoint: `/api/v1/dataset/?q=${queryParams}`, }).then(response => ({ data: response.json.result.map( (r: { table_name: string; id: number; owners: [DatasetOwner] }) => ({ diff --git a/superset-frontend/src/SqlLab/reducers/sqlLab.js b/superset-frontend/src/SqlLab/reducers/sqlLab.js index e6e0a54ed949..6e2db85a8bc4 100644 --- a/superset-frontend/src/SqlLab/reducers/sqlLab.js +++ b/superset-frontend/src/SqlLab/reducers/sqlLab.js @@ -664,16 +664,27 @@ export default function sqlLabReducer(state = {}, action) { [actions.CLEAR_INACTIVE_QUERIES]() { const { queries } = state; const cleanedQueries = Object.fromEntries( - Object.entries(queries).filter(([, query]) => { - if ( - ['running', 'pending'].includes(query.state) && - Date.now() - query.startDttm > action.interval && - query.progress === 0 - ) { - return false; - } - return true; - }), + Object.entries(queries) + .filter(([, query]) => { + if ( + ['running', 'pending'].includes(query.state) && + Date.now() - query.startDttm > action.interval && + query.progress === 0 + ) { + return false; + } + return true; + }) + .map(([id, query]) => [ + id, + { + ...query, + state: + query.resultsKey && query.results?.status + ? query.results.status + : query.state, + }, + ]), ); return { ...state, queries: cleanedQueries }; }, diff --git a/superset-frontend/src/SqlLab/reducers/sqlLab.test.js b/superset-frontend/src/SqlLab/reducers/sqlLab.test.js index 89ddc61f8c8a..e1a234734bca 100644 --- a/superset-frontend/src/SqlLab/reducers/sqlLab.test.js +++ b/superset-frontend/src/SqlLab/reducers/sqlLab.test.js @@ -16,6 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ +import { QueryState } from '@superset-ui/core'; import sqlLabReducer from 'src/SqlLab/reducers/sqlLab'; import * as actions from 'src/SqlLab/actions/sqlLab'; import { table, initialState as mockState } from '../fixtures'; @@ -388,4 +389,38 @@ describe('sqlLabReducer', () => { newState = sqlLabReducer(newState, actions.refreshQueries({})); }); }); + describe('CLEAR_INACTIVE_QUERIES', () => { + let newState; + let query; + beforeEach(() => { + query = { + id: 'abcd', + changed_on: Date.now(), + startDttm: Date.now(), + state: QueryState.FETCHING, + progress: 100, + resultsKey: 'fa3dccc4-c549-4fbf-93c8-b4fb5a6fb8b7', + cached: false, + }; + }); + it('updates queries that have already been completed', () => { + newState = sqlLabReducer( + { + ...newState, + queries: { + abcd: { + ...query, + results: { + query_id: 1234, + status: QueryState.SUCCESS, + data: [], + }, + }, + }, + }, + actions.clearInactiveQueries(Date.now()), + ); + expect(newState.queries.abcd.state).toBe(QueryState.SUCCESS); + }); + }); }); diff --git a/superset-frontend/src/components/Chart/Chart.jsx b/superset-frontend/src/components/Chart/Chart.jsx index af90ae6b0a08..da9a81516f5e 100644 --- a/superset-frontend/src/components/Chart/Chart.jsx +++ b/superset-frontend/src/components/Chart/Chart.jsx @@ -169,7 +169,7 @@ class Chart extends React.PureComponent { // Create chart with POST request this.props.actions.postChartFormData( this.props.formData, - this.props.force || getUrlParam(URL_PARAMS.force), // allow override via url params force=true + Boolean(this.props.force || getUrlParam(URL_PARAMS.force)), // allow override via url params force=true this.props.timeout, this.props.chartId, this.props.dashboardId, diff --git a/superset-frontend/src/components/Chart/chartAction.js b/superset-frontend/src/components/Chart/chartAction.js index d08070fe4056..6db969ebb94e 100644 --- a/superset-frontend/src/components/Chart/chartAction.js +++ b/superset-frontend/src/components/Chart/chartAction.js @@ -185,7 +185,7 @@ const v1ChartDataRequest = async ( const qs = {}; if (sliceId !== undefined) qs.form_data = `{"slice_id":${sliceId}}`; if (dashboardId !== undefined) qs.dashboard_id = dashboardId; - if (force !== false) qs.force = force; + if (force) qs.force = force; const allowDomainSharding = // eslint-disable-next-line camelcase diff --git a/superset-frontend/src/components/Chart/chartActions.test.js b/superset-frontend/src/components/Chart/chartActions.test.js index 65b008de62f5..b44ca7c8d791 100644 --- a/superset-frontend/src/components/Chart/chartActions.test.js +++ b/superset-frontend/src/components/Chart/chartActions.test.js @@ -51,7 +51,7 @@ describe('chart actions', () => { .callsFake(() => MOCK_URL); getChartDataUriStub = sinon .stub(exploreUtils, 'getChartDataUri') - .callsFake(() => URI(MOCK_URL)); + .callsFake(({ qs }) => URI(MOCK_URL).query(qs)); fakeMetadata = { useLegacyApi: true }; metadataRegistryStub = sinon .stub(chartlib, 'getChartMetadataRegistry') @@ -81,7 +81,7 @@ describe('chart actions', () => { }); it('should query with the built query', async () => { - const actionThunk = actions.postChartFormData({}); + const actionThunk = actions.postChartFormData({}, null); await actionThunk(dispatch); expect(fetchMock.calls(MOCK_URL)).toHaveLength(1); diff --git a/superset-frontend/src/components/Datasource/DatasourceEditor.jsx b/superset-frontend/src/components/Datasource/DatasourceEditor.jsx index ffbba4c7db1f..991a354d9070 100644 --- a/superset-frontend/src/components/Datasource/DatasourceEditor.jsx +++ 
b/superset-frontend/src/components/Datasource/DatasourceEditor.jsx @@ -1131,7 +1131,7 @@ class DatasourceEditor extends React.PureComponent { language="sql" offerEditInModal={false} minLines={20} - maxLines={20} + maxLines={Infinity} readOnly={!this.state.isEditMode} resize="both" /> @@ -1386,7 +1386,7 @@ class DatasourceEditor extends React.PureComponent { const { theme } = this.props; return ( - + {this.renderErrors()} ({ marginBottom: theme.gridUnit * 4 })} diff --git a/superset-frontend/src/components/Datasource/DatasourceModal.test.jsx b/superset-frontend/src/components/Datasource/DatasourceModal.test.jsx index 5bcb705b683d..bb6fce7577bd 100644 --- a/superset-frontend/src/components/Datasource/DatasourceModal.test.jsx +++ b/superset-frontend/src/components/Datasource/DatasourceModal.test.jsx @@ -18,30 +18,35 @@ */ import React from 'react'; import { act } from 'react-dom/test-utils'; -import { mount } from 'enzyme'; -import { Provider } from 'react-redux'; +import { + render, + screen, + waitFor, + fireEvent, + cleanup, +} from '@testing-library/react'; import fetchMock from 'fetch-mock'; +import { Provider } from 'react-redux'; import sinon from 'sinon'; -import { supersetTheme, ThemeProvider } from '@superset-ui/core'; - -import waitForComponentToPaint from 'spec/helpers/waitForComponentToPaint'; +import { + supersetTheme, + ThemeProvider, + SupersetClient, +} from '@superset-ui/core'; import { defaultStore as store } from 'spec/helpers/testing-library'; -import Modal from 'src/components/Modal'; import { DatasourceModal } from 'src/components/Datasource'; -import DatasourceEditor from 'src/components/Datasource/DatasourceEditor'; import * as uiCore from '@superset-ui/core'; import mockDatasource from 'spec/fixtures/mockDatasource'; -import { api } from 'src/hooks/apiResources/queryApi'; - -const datasource = mockDatasource['7__table']; +// Define your constants here const SAVE_ENDPOINT = 'glob:*/api/v1/dataset/7'; const SAVE_PAYLOAD = { new: 'data' }; const SAVE_DATASOURCE_ENDPOINT = 'glob:*/api/v1/dataset/7'; const GET_DATASOURCE_ENDPOINT = SAVE_DATASOURCE_ENDPOINT; +const GET_DATABASE_ENDPOINT = 'glob:*/api/v1/database/?q=*'; const mockedProps = { - datasource, + datasource: mockDatasource['7__table'], addSuccessToast: () => {}, addDangerToast: () => {}, onChange: () => {}, @@ -50,80 +55,101 @@ const mockedProps = { onDatasourceSave: sinon.spy(), }; -async function mountAndWait(props = mockedProps) { - const mounted = mount( +let container; +let isFeatureEnabledMock; + +async function renderAndWait(props = mockedProps) { + const { container: renderedContainer } = render( - + + + , - { - wrappingComponent: ThemeProvider, - wrappingComponentProps: { theme: supersetTheme }, - }, ); - await waitForComponentToPaint(mounted); - return mounted; + container = renderedContainer; } +beforeEach(() => { + fetchMock.reset(); + cleanup(); + isFeatureEnabledMock = jest.spyOn(uiCore, 'isFeatureEnabled'); + renderAndWait(); + fetchMock.post(SAVE_ENDPOINT, SAVE_PAYLOAD); + fetchMock.put(SAVE_DATASOURCE_ENDPOINT, {}); + fetchMock.get(GET_DATASOURCE_ENDPOINT, { result: {} }); + fetchMock.get(GET_DATABASE_ENDPOINT, { result: [] }); +}); + +afterEach(() => { + isFeatureEnabledMock.mockRestore(); +}); + describe('DatasourceModal', () => { - let wrapper; - let isFeatureEnabledMock; - beforeEach(async () => { - isFeatureEnabledMock = jest.spyOn(uiCore, 'isFeatureEnabled'); - fetchMock.reset(); - wrapper = await mountAndWait(); + it('renders', async () => { + expect(container).toBeDefined(); }); - 
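The `popDatasourceQuery` change further up narrows the dataset GET to just the columns SQL Lab needs by rison-encoding the `q` parameter. The same request can be reproduced from Python with the `prison` rison port; a sketch in which the host, dataset id, and access token are placeholders:

```
import prison
import requests

q = prison.dumps({
    "keys": ["none"],
    "columns": ["name", "schema", "database.id", "select_star"],
})

resp = requests.get(
    f"http://localhost:8088/api/v1/dataset/42?q={q}",
    headers={"Authorization": f"Bearer {access_token}"},  # placeholder token
)
resp.raise_for_status()
print(resp.json()["result"]["select_star"])
```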
afterAll(() => { - isFeatureEnabledMock.restore(); - act(() => { - store.dispatch(api.util.resetApiState()); - }); + it('renders the component', () => { + expect(screen.getByText('Edit Dataset')).toBeInTheDocument(); }); - it('renders', () => { - expect(wrapper.find(DatasourceModal)).toExist(); + it('renders a Modal', async () => { + expect(screen.getByRole('dialog')).toBeInTheDocument(); }); - it('renders a Modal', () => { - expect(wrapper.find(Modal)).toExist(); + it('renders a DatasourceEditor', async () => { + expect(screen.getByTestId('datasource-editor')).toBeInTheDocument(); }); - it('renders a DatasourceEditor', () => { - expect(wrapper.find(DatasourceEditor)).toExist(); + it('renders a legacy data source btn', () => { + const button = screen.getByTestId('datasource-modal-legacy-edit'); + expect(button).toBeInTheDocument(); }); - it('saves on confirm', async () => { - const callsP = fetchMock.post(SAVE_ENDPOINT, SAVE_PAYLOAD); - fetchMock.put(SAVE_DATASOURCE_ENDPOINT, {}); - fetchMock.get(GET_DATASOURCE_ENDPOINT, {}); - act(() => { - wrapper - .find('button[data-test="datasource-modal-save"]') - .props() - .onClick(); + it('disables the save button when the datasource is managed externally', () => { + // the render is currently in a before operation, so it needs to be cleaned up + // we could alternatively move all the renders back into the tests or find a better + // way to automatically render but still allow to pass in props with the tests + cleanup(); + + renderAndWait({ + ...mockedProps, + datasource: { ...mockedProps.datasource, is_managed_externally: true }, }); - await waitForComponentToPaint(wrapper); - act(() => { - const okButton = wrapper.find( - '.ant-modal-confirm .ant-modal-confirm-btns .ant-btn-primary', - ); - okButton.simulate('click'); + const saveButton = screen.getByTestId('datasource-modal-save'); + expect(saveButton).toBeDisabled(); + }); + + it('calls the onDatasourceSave function when the save button is clicked', async () => { + cleanup(); + const onDatasourceSave = jest.fn(); + + renderAndWait({ ...mockedProps, onDatasourceSave }); + const saveButton = screen.getByTestId('datasource-modal-save'); + await act(async () => { + fireEvent.click(saveButton); + const okButton = await screen.findByRole('button', { name: 'OK' }); + okButton.click(); + }); + await waitFor(() => { + expect(onDatasourceSave).toHaveBeenCalled(); }); - await waitForComponentToPaint(wrapper); - // one call to PUT, then one to GET - const expected = [ - 'http://localhost/api/v1/dataset/7', - 'http://localhost/api/v1/dataset/7', - ]; - expect(callsP._calls.map(call => call[0])).toEqual( - expected, - ); /* eslint no-underscore-dangle: 0 */ }); - it('renders a legacy data source btn', () => { - expect( - wrapper.find('button[data-test="datasource-modal-legacy-edit"]'), - ).toExist(); + it('should render error dialog', async () => { + jest + .spyOn(SupersetClient, 'put') + .mockRejectedValue(new Error('Something went wrong')); + await act(async () => { + const saveButton = screen.getByTestId('datasource-modal-save'); + fireEvent.click(saveButton); + const okButton = await screen.findByRole('button', { name: 'OK' }); + okButton.click(); + }); + await act(async () => { + const errorTitle = await screen.findByText('Error saving dataset'); + expect(errorTitle).toBeInTheDocument(); + }); }); }); diff --git a/superset-frontend/src/components/Datasource/DatasourceModal.tsx b/superset-frontend/src/components/Datasource/DatasourceModal.tsx index f9c40c47ba02..031609e09a48 100644 --- 
a/superset-frontend/src/components/Datasource/DatasourceModal.tsx +++ b/superset-frontend/src/components/Datasource/DatasourceModal.tsx @@ -28,12 +28,13 @@ import { SupersetClient, t, } from '@superset-ui/core'; - import Modal from 'src/components/Modal'; import AsyncEsmComponent from 'src/components/AsyncEsmComponent'; -import { getClientErrorObject } from 'src/utils/getClientErrorObject'; +import { SupersetError } from 'src/components/ErrorMessage/types'; +import ErrorMessageWithStackTrace from 'src/components/ErrorMessage/ErrorMessageWithStackTrace'; import withToasts from 'src/components/MessageToasts/withToasts'; import { useSelector } from 'react-redux'; +import { getClientErrorObject } from 'src/utils/getClientErrorObject'; const DatasourceEditor = AsyncEsmComponent(() => import('./DatasourceEditor')); @@ -202,11 +203,26 @@ const DatasourceModal: FunctionComponent = ({ }) .catch(response => { setIsSaving(false); - getClientErrorObject(response).then(({ error }) => { + getClientErrorObject(response).then(error => { + let errorResponse: SupersetError | undefined; + let errorText: string | undefined; + // sip-40 error response + if (error?.errors?.length) { + errorResponse = error.errors[0]; + } else if (typeof error.error === 'string') { + // backward compatible with old error messages + errorText = error.error; + } modal.error({ - title: t('Error'), - content: error || t('An error has occurred'), + title: t('Error saving dataset'), okButtonProps: { danger: true, className: 'btn-danger' }, + content: ( + + ), }); }); }); diff --git a/superset-frontend/src/components/ErrorMessage/types.ts b/superset-frontend/src/components/ErrorMessage/types.ts index d3fe5bfdf7af..4375a9dec1cf 100644 --- a/superset-frontend/src/components/ErrorMessage/types.ts +++ b/superset-frontend/src/components/ErrorMessage/types.ts @@ -88,7 +88,7 @@ export type ErrorType = ValueOf; // Keep in sync with superset/views/errors.py export type ErrorLevel = 'info' | 'warning' | 'error'; -export type ErrorSource = 'dashboard' | 'explore' | 'sqllab'; +export type ErrorSource = 'dashboard' | 'explore' | 'sqllab' | 'crud'; export type SupersetError | null> = { error_type: ErrorType; diff --git a/superset-frontend/src/components/Select/AsyncSelect.test.tsx b/superset-frontend/src/components/Select/AsyncSelect.test.tsx index e49f00be537a..0bb24b474a0c 100644 --- a/superset-frontend/src/components/Select/AsyncSelect.test.tsx +++ b/superset-frontend/src/components/Select/AsyncSelect.test.tsx @@ -840,6 +840,48 @@ test('does not fire onChange when searching but no selection', async () => { expect(onChange).toHaveBeenCalledTimes(1); }); +test('fires onChange when clearing the selection in single mode', async () => { + const onChange = jest.fn(); + render( + , + ); + clearAll(); + expect(onChange).toHaveBeenCalledTimes(1); +}); + +test('fires onChange when clearing the selection in multiple mode', async () => { + const onChange = jest.fn(); + render( + , + ); + clearAll(); + expect(onChange).toHaveBeenCalledTimes(1); +}); + +test('fires onChange when pasting a selection', async () => { + const onChange = jest.fn(); + render(); + await open(); + const input = getElementByClassName('.ant-select-selection-search-input'); + const paste = createEvent.paste(input, { + clipboardData: { + getData: () => OPTIONS[0].label, + }, + }); + fireEvent(input, paste); + expect(onChange).toHaveBeenCalledTimes(1); +}); + test('does not duplicate options when using numeric values', async () => { render( { expect(onChange).toHaveBeenCalledTimes(1); }); 
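The `DatasourceModal` change above branches on SIP-40 style payloads (an `errors` list) versus legacy `error` strings. Any REST client can apply the same branching; a hedged Python sketch, with the dataset id, body, and token as placeholders:

```
import requests

resp = requests.put(
    "http://localhost:8088/api/v1/dataset/7",  # placeholder dataset id
    json={"description": "updated via API"},
    headers={"Authorization": f"Bearer {access_token}"},  # placeholder token
)
if not resp.ok:
    body = resp.json()
    if body.get("errors"):  # SIP-40 payload: a list of SupersetError objects
        for err in body["errors"]:
            print(err.get("error_type"), err.get("message"))
    elif isinstance(body.get("error"), str):  # legacy payload
        print(body["error"])
```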
+test('fires onChange when clearing the selection in single mode', async () => { + const onChange = jest.fn(); + render( + , + ); + clearAll(); + expect(onChange).toHaveBeenCalledTimes(1); +}); + +test('fires onChange when pasting a selection', async () => { + const onChange = jest.fn(); + render( { @@ -571,6 +571,7 @@ const Select = forwardRef( ]); } } + fireOnChange(); }; return ( diff --git a/superset-frontend/src/constants.ts b/superset-frontend/src/constants.ts index bc2b0be843d3..b707a48e04fa 100644 --- a/superset-frontend/src/constants.ts +++ b/superset-frontend/src/constants.ts @@ -51,6 +51,10 @@ export const URL_PARAMS = { name: 'filter_set', type: 'string', }, + showFilters: { + name: 'show_filters', + type: 'boolean', + }, expandFilters: { name: 'expand_filters', type: 'boolean', diff --git a/superset-frontend/src/dashboard/components/Dashboard.jsx b/superset-frontend/src/dashboard/components/Dashboard.jsx index 827f0f455d3d..6e909f3b1527 100644 --- a/superset-frontend/src/dashboard/components/Dashboard.jsx +++ b/superset-frontend/src/dashboard/components/Dashboard.jsx @@ -25,9 +25,8 @@ import Loading from 'src/components/Loading'; import getBootstrapData from 'src/utils/getBootstrapData'; import getChartIdsFromLayout from '../util/getChartIdsFromLayout'; import getLayoutComponentFromChartId from '../util/getLayoutComponentFromChartId'; -import DashboardBuilder from './DashboardBuilder/DashboardBuilder'; + import { - chartPropShape, slicePropShape, dashboardInfoPropShape, dashboardStatePropShape, @@ -53,7 +52,6 @@ const propTypes = { }).isRequired, dashboardInfo: dashboardInfoPropShape.isRequired, dashboardState: dashboardStatePropShape.isRequired, - charts: PropTypes.objectOf(chartPropShape).isRequired, slices: PropTypes.objectOf(slicePropShape).isRequired, activeFilters: PropTypes.object.isRequired, chartConfiguration: PropTypes.object, @@ -213,11 +211,6 @@ class Dashboard extends React.PureComponent { } } - // return charts in array - getAllCharts() { - return Object.values(this.props.charts); - } - applyFilters() { const { appliedFilters } = this; const { activeFilters, ownDataCharts } = this.props; @@ -288,11 +281,7 @@ class Dashboard extends React.PureComponent { if (this.context.loading) { return ; } - return ( - <> - - - ); + return this.props.children; } } diff --git a/superset-frontend/src/dashboard/components/Dashboard.test.jsx b/superset-frontend/src/dashboard/components/Dashboard.test.jsx index 56a696f91314..a66eab37e37d 100644 --- a/superset-frontend/src/dashboard/components/Dashboard.test.jsx +++ b/superset-frontend/src/dashboard/components/Dashboard.test.jsx @@ -21,7 +21,6 @@ import { shallow } from 'enzyme'; import sinon from 'sinon'; import Dashboard from 'src/dashboard/components/Dashboard'; -import DashboardBuilder from 'src/dashboard/components/DashboardBuilder/DashboardBuilder'; import { CHART_TYPE } from 'src/dashboard/util/componentTypes'; import newComponentFactory from 'src/dashboard/util/newComponentFactory'; @@ -63,8 +62,14 @@ describe('Dashboard', () => { loadStats: {}, }; + const ChildrenComponent = () =>
<div>Test</div>
; + function setup(overrideProps) { - const wrapper = shallow(); + const wrapper = shallow( + + + , + ); return wrapper; } @@ -76,9 +81,9 @@ describe('Dashboard', () => { '3_country_name': { values: ['USA'], scope: [] }, }; - it('should render a DashboardBuilder', () => { + it('should render the children component', () => { const wrapper = setup(); - expect(wrapper.find(DashboardBuilder)).toExist(); + expect(wrapper.find(ChildrenComponent)).toExist(); }); describe('UNSAFE_componentWillReceiveProps', () => { diff --git a/superset-frontend/packages/superset-ui-core/test/validator/validateMaxValue.test.ts b/superset-frontend/src/dashboard/components/SyncDashboardState/SyncDashboardState.test.tsx similarity index 52% rename from superset-frontend/packages/superset-ui-core/test/validator/validateMaxValue.test.ts rename to superset-frontend/src/dashboard/components/SyncDashboardState/SyncDashboardState.test.tsx index 70f3d332c52e..1565a43e1965 100644 --- a/superset-frontend/packages/superset-ui-core/test/validator/validateMaxValue.test.ts +++ b/superset-frontend/src/dashboard/components/SyncDashboardState/SyncDashboardState.test.tsx @@ -1,4 +1,4 @@ -/* +/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -16,23 +16,19 @@ * specific language governing permissions and limitations * under the License. */ +import React from 'react'; +import { render } from 'spec/helpers/testing-library'; +import { getItem, LocalStorageKeys } from 'src/utils/localStorageHelpers'; +import SyncDashboardState from '.'; -import { validateMaxValue } from '@superset-ui/core'; -import './setup'; - -describe('validateInteger()', () => { - it('returns the warning message if invalid', () => { - expect(validateMaxValue(10.1, 10)).toBeTruthy(); - expect(validateMaxValue(1, 0)).toBeTruthy(); - expect(validateMaxValue('2', 1)).toBeTruthy(); +test('stores the dashboard info with local storages', () => { + const testDashboardPageId = 'dashboardPageId'; + render(, { + useRedux: true, }); - it('returns false if the input is valid', () => { - expect(validateMaxValue(0, 1)).toBeFalsy(); - expect(validateMaxValue(10, 10)).toBeFalsy(); - expect(validateMaxValue(undefined, 1)).toBeFalsy(); - expect(validateMaxValue(NaN, NaN)).toBeFalsy(); - expect(validateMaxValue(null, 1)).toBeFalsy(); - expect(validateMaxValue('1', 1)).toBeFalsy(); - expect(validateMaxValue('a', 1)).toBeFalsy(); + expect(getItem(LocalStorageKeys.dashboard__explore_context, {})).toEqual({ + [testDashboardPageId]: expect.objectContaining({ + dashboardPageId: testDashboardPageId, + }), }); }); diff --git a/superset-frontend/src/dashboard/components/SyncDashboardState/index.tsx b/superset-frontend/src/dashboard/components/SyncDashboardState/index.tsx new file mode 100644 index 000000000000..b25d24329225 --- /dev/null +++ b/superset-frontend/src/dashboard/components/SyncDashboardState/index.tsx @@ -0,0 +1,103 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import React, { useEffect } from 'react'; +import pick from 'lodash/pick'; +import { shallowEqual, useSelector } from 'react-redux'; +import { DashboardContextForExplore } from 'src/types/DashboardContextForExplore'; +import { + getItem, + LocalStorageKeys, + setItem, +} from 'src/utils/localStorageHelpers'; +import { RootState } from 'src/dashboard/types'; +import { getActiveFilters } from 'src/dashboard/util/activeDashboardFilters'; + +type Props = { dashboardPageId: string }; + +const EMPTY_OBJECT = {}; + +export const getDashboardContextLocalStorage = () => { + const dashboardsContexts = getItem( + LocalStorageKeys.dashboard__explore_context, + {}, + ); + // A new dashboard tab id is generated on each dashboard page opening. + // We mark ids as redundant when user leaves the dashboard, because they won't be reused. + // Then we remove redundant dashboard contexts from local storage in order not to clutter it + return Object.fromEntries( + Object.entries(dashboardsContexts).filter( + ([, value]) => !value.isRedundant, + ), + ); +}; + +const updateDashboardTabLocalStorage = ( + dashboardPageId: string, + dashboardContext: DashboardContextForExplore, +) => { + const dashboardsContexts = getDashboardContextLocalStorage(); + setItem(LocalStorageKeys.dashboard__explore_context, { + ...dashboardsContexts, + [dashboardPageId]: dashboardContext, + }); +}; + +const SyncDashboardState: React.FC = ({ dashboardPageId }) => { + const dashboardContextForExplore = useSelector< + RootState, + DashboardContextForExplore + >( + ({ dashboardInfo, dashboardState, nativeFilters, dataMask }) => ({ + labelColors: dashboardInfo.metadata?.label_colors || EMPTY_OBJECT, + sharedLabelColors: + dashboardInfo.metadata?.shared_label_colors || EMPTY_OBJECT, + colorScheme: dashboardState?.colorScheme, + chartConfiguration: + dashboardInfo.metadata?.chart_configuration || EMPTY_OBJECT, + nativeFilters: Object.entries(nativeFilters.filters).reduce( + (acc, [key, filterValue]) => ({ + ...acc, + [key]: pick(filterValue, ['chartsInScope']), + }), + {}, + ), + dataMask, + dashboardId: dashboardInfo.id, + filterBoxFilters: getActiveFilters(), + dashboardPageId, + }), + shallowEqual, + ); + + useEffect(() => { + updateDashboardTabLocalStorage(dashboardPageId, dashboardContextForExplore); + return () => { + // mark tab id as redundant when dashboard unmounts - case when user opens + // Explore in the same tab + updateDashboardTabLocalStorage(dashboardPageId, { + ...dashboardContextForExplore, + isRedundant: true, + }); + }; + }, [dashboardContextForExplore, dashboardPageId]); + + return null; +}; + +export default SyncDashboardState; diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/FilterControl.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/FilterControl.tsx index 515fed1907bd..37739e537068 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/FilterControl.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/FilterControl.tsx @@ -112,6 
+112,7 @@ const HorizontalOverflowFilterControlContainer = styled( const VerticalFormItem = styled(StyledFormItem)` .ant-form-item-label { + overflow: visible; label.ant-form-item-required:not(.ant-form-item-required-mark-optional) { &::after { display: none; @@ -127,6 +128,7 @@ const HorizontalFormItem = styled(StyledFormItem)` } .ant-form-item-label { + overflow: visible; padding-bottom: 0; margin-right: ${({ theme }) => theme.gridUnit * 2}px; label.ant-form-item-required:not(.ant-form-item-required-mark-optional) { @@ -200,10 +202,11 @@ const DescriptionToolTip = ({ description }: { description: string }) => ( placement="right" overlayInnerStyle={{ display: '-webkit-box', - overflow: 'hidden', - WebkitLineClamp: 20, + WebkitLineClamp: 10, WebkitBoxOrient: 'vertical', + overflow: 'hidden', textOverflow: 'ellipsis', + whiteSpace: 'normal', }} getPopupContainer={trigger => trigger.parentElement as HTMLElement} > diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/FilterValue.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/FilterValue.tsx index 5235edcdc353..f44a1a1df687 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/FilterValue.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/FilterValue.tsx @@ -52,6 +52,7 @@ import { onFiltersRefreshSuccess, setDirectPathToChild, } from 'src/dashboard/actions/dashboardState'; +import { RESPONSIVE_WIDTH } from 'src/filters/components/common'; import { FAST_DEBOUNCE } from 'src/constants'; import { dispatchHoverAction, dispatchFocusAction } from './utils'; import { FilterControlProps } from './types'; @@ -322,7 +323,7 @@ const FilterValue: React.FC = ({ ) : ( import( /* webpackChunkName: "DashboardContainer" */ /* webpackPreload: true */ - 'src/dashboard/containers/Dashboard' + 'src/dashboard/components/DashboardBuilder/DashboardBuilder' ), ); @@ -83,74 +81,15 @@ type PageProps = { idOrSlug: string; }; -const getDashboardContextLocalStorage = () => { - const dashboardsContexts = getItem( - LocalStorageKeys.dashboard__explore_context, - {}, - ); - // A new dashboard tab id is generated on each dashboard page opening. - // We mark ids as redundant when user leaves the dashboard, because they won't be reused. 
- // Then we remove redundant dashboard contexts from local storage in order not to clutter it - return Object.fromEntries( - Object.entries(dashboardsContexts).filter( - ([, value]) => !value.isRedundant, - ), - ); -}; - -const updateDashboardTabLocalStorage = ( - dashboardPageId: string, - dashboardContext: DashboardContextForExplore, -) => { - const dashboardsContexts = getDashboardContextLocalStorage(); - setItem(LocalStorageKeys.dashboard__explore_context, { - ...dashboardsContexts, - [dashboardPageId]: dashboardContext, - }); -}; - -const useSyncDashboardStateWithLocalStorage = () => { - const dashboardPageId = useMemo(() => shortid.generate(), []); - const dashboardContextForExplore = useSelector< - RootState, - DashboardContextForExplore - >(({ dashboardInfo, dashboardState, nativeFilters, dataMask }) => ({ - labelColors: dashboardInfo.metadata?.label_colors || {}, - sharedLabelColors: dashboardInfo.metadata?.shared_label_colors || {}, - colorScheme: dashboardState?.colorScheme, - chartConfiguration: dashboardInfo.metadata?.chart_configuration || {}, - nativeFilters: Object.entries(nativeFilters.filters).reduce( - (acc, [key, filterValue]) => ({ - ...acc, - [key]: pick(filterValue, ['chartsInScope']), - }), - {}, - ), - dataMask, - dashboardId: dashboardInfo.id, - filterBoxFilters: getActiveFilters(), - dashboardPageId, - })); - - useEffect(() => { - updateDashboardTabLocalStorage(dashboardPageId, dashboardContextForExplore); - return () => { - // mark tab id as redundant when dashboard unmounts - case when user opens - // Explore in the same tab - updateDashboardTabLocalStorage(dashboardPageId, { - ...dashboardContextForExplore, - isRedundant: true, - }); - }; - }, [dashboardContextForExplore, dashboardPageId]); - return dashboardPageId; -}; - export const DashboardPage: FC = ({ idOrSlug }: PageProps) => { const theme = useTheme(); const dispatch = useDispatch(); const history = useHistory(); - const dashboardPageId = useSyncDashboardStateWithLocalStorage(); + const dashboardPageId = useMemo(() => shortid.generate(), []); + const hasDashboardInfoInitiated = useSelector( + ({ dashboardInfo }) => + dashboardInfo && Object.keys(dashboardInfo).length > 0, + ); const { addDangerToast } = useToasts(); const { result: dashboard, error: dashboardApiError } = useDashboard(idOrSlug); @@ -284,7 +223,7 @@ export const DashboardPage: FC = ({ idOrSlug }: PageProps) => { }, [addDangerToast, datasets, datasetsApiError, dispatch]); if (error) throw error; // caught in error boundary - if (!readyToRender || !isDashboardHydrated.current) return ; + if (!readyToRender || !hasDashboardInfoInitiated) return ; return ( <> @@ -295,8 +234,11 @@ export const DashboardPage: FC = ({ idOrSlug }: PageProps) => { chartContextMenuStyles(theme), ]} /> + - + + + ); diff --git a/superset-frontend/src/dataMask/reducer.ts b/superset-frontend/src/dataMask/reducer.ts index 6e9a5fae5404..f2163a54a44a 100644 --- a/superset-frontend/src/dataMask/reducer.ts +++ b/superset-frontend/src/dataMask/reducer.ts @@ -56,7 +56,6 @@ export function getInitialDataMask( } return { ...otherProps, - __cache: {}, extraFormData: {}, filterState: {}, ownState: {}, diff --git a/superset-frontend/src/explore/actions/saveModalActions.js b/superset-frontend/src/explore/actions/saveModalActions.js index 0bf7ced475e4..53c8952082d0 100644 --- a/superset-frontend/src/explore/actions/saveModalActions.js +++ b/superset-frontend/src/explore/actions/saveModalActions.js @@ -50,7 +50,7 @@ export function saveSliceSuccess(data) { return { type: 
SAVE_SLICE_SUCCESS, data }; } -const extractAddHocFiltersFromFormData = formDataToHandle => +const extractAdhocFiltersFromFormData = formDataToHandle => Object.entries(formDataToHandle).reduce( (acc, [key, value]) => ADHOC_FILTER_REGEX.test(key) @@ -71,7 +71,7 @@ export const getSlicePayload = ( owners, formDataFromSlice = {}, ) => { - const adhocFilters = extractAddHocFiltersFromFormData( + const adhocFilters = extractAdhocFiltersFromFormData( formDataWithNativeFilters, ); @@ -80,10 +80,17 @@ export const getSlicePayload = ( // to filter the chart. Before, any time range filter applied in the dashboard // would end up as an extra filter and when overwriting the chart the original // time range adhoc_filter was lost - if (isEmpty(adhocFilters?.adhoc_filters) && !isEmpty(formDataFromSlice)) { - formDataFromSlice?.adhoc_filters?.forEach(filter => { - if (filter.operator === Operators.TEMPORAL_RANGE && !filter.isExtra) { - adhocFilters.adhoc_filters.push({ ...filter, comparator: 'No filter' }); + if (!isEmpty(formDataFromSlice)) { + Object.keys(adhocFilters || {}).forEach(adhocFilterKey => { + if (isEmpty(adhocFilters[adhocFilterKey])) { + formDataFromSlice?.[adhocFilterKey]?.forEach(filter => { + if (filter.operator === Operators.TEMPORAL_RANGE && !filter.isExtra) { + adhocFilters[adhocFilterKey].push({ + ...filter, + comparator: 'No filter', + }); + } + }); } }); } diff --git a/superset-frontend/src/explore/actions/saveModalActions.test.js b/superset-frontend/src/explore/actions/saveModalActions.test.js index 4c0da5132dd7..fc50e3d3cf8f 100644 --- a/superset-frontend/src/explore/actions/saveModalActions.test.js +++ b/superset-frontend/src/explore/actions/saveModalActions.test.js @@ -391,4 +391,57 @@ describe('getSlicePayload', () => { formDataFromSlice.adhoc_filters, ); }); + + test('should return the correct payload when formDataWithNativeFilters has a filter with isExtra set to true in mixed chart', () => { + const formDataFromSliceWithAdhocFilterB = { + ...formDataFromSlice, + adhoc_filters_b: [ + { + clause: 'WHERE', + subject: 'year', + operator: 'TEMPORAL_RANGE', + comparator: 'No filter', + expressionType: 'SIMPLE', + }, + ], + }; + const formDataWithAdhocFiltersWithExtra = { + ...formDataWithNativeFilters, + viz_type: 'mixed_timeseries', + adhoc_filters: [ + { + clause: 'WHERE', + subject: 'year', + operator: 'TEMPORAL_RANGE', + comparator: 'No filter', + expressionType: 'SIMPLE', + isExtra: true, + }, + ], + adhoc_filters_b: [ + { + clause: 'WHERE', + subject: 'year', + operator: 'TEMPORAL_RANGE', + comparator: 'No filter', + expressionType: 'SIMPLE', + isExtra: true, + }, + ], + }; + const result = getSlicePayload( + sliceName, + formDataWithAdhocFiltersWithExtra, + dashboards, + owners, + formDataFromSliceWithAdhocFilterB, + ); + + expect(JSON.parse(result.params).adhoc_filters).toEqual( + formDataFromSliceWithAdhocFilterB.adhoc_filters, + ); + expect(JSON.parse(result.params).adhoc_filters_b).toEqual( + formDataFromSliceWithAdhocFilterB.adhoc_filters_b, + ); + }); }); diff --git a/superset-frontend/src/features/databases/DatabaseModal/ExtraOptions.tsx b/superset-frontend/src/features/databases/DatabaseModal/ExtraOptions.tsx index 207e197cd40d..55c3875f98a6 100644 --- a/superset-frontend/src/features/databases/DatabaseModal/ExtraOptions.tsx +++ b/superset-frontend/src/features/databases/DatabaseModal/ExtraOptions.tsx @@ -541,7 +541,7 @@ const ExtraOptions = ({
{t( - 'Specify the database version. This should be used with ' + - 'Presto in order to enable query cost estimation.', + 'Specify the database version. This is used with Presto for query cost ' + + 'estimation, and Dremio for syntax changes, among others.', )}
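The updated help text above ties the stored database version to engine-specific behavior: Presto uses it to gate query cost estimation, and Dremio uses it to select syntax variants. As a rough illustration of that kind of version gating (a sketch only: the threshold constant and helper function below are hypothetical, not part of this diff), an engine spec can parse the user-supplied version string and compare it against a minimum:

from packaging.version import Version

# Hypothetical sketch: gate an engine feature on the version string entered
# in the database modal. The 0.319 threshold is an assumed value chosen for
# illustration, not something taken from this changeset.
MIN_COST_ESTIMATE_VERSION = Version("0.319")

def supports_cost_estimation(configured_version: str | None) -> bool:
    # No version configured: be conservative and disable the feature.
    if not configured_version:
        return False
    return Version(configured_version) >= MIN_COST_ESTIMATE_VERSION

print(supports_cost_estimation("0.320"))  # True
print(supports_cost_estimation(None))     # False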
diff --git a/superset-frontend/src/features/rls/RowLevelSecurityModal.tsx b/superset-frontend/src/features/rls/RowLevelSecurityModal.tsx index dac4858e4adb..20197ecf5862 100644 --- a/superset-frontend/src/features/rls/RowLevelSecurityModal.tsx +++ b/superset-frontend/src/features/rls/RowLevelSecurityModal.tsx @@ -385,10 +385,10 @@ function RowLevelSecurityModal(props: RowLevelSecurityModalProps) {
- {t('Tables')} * + {t('Datasets')} *
diff --git a/superset-frontend/src/filters/components/Select/SelectFilterPlugin.test.tsx b/superset-frontend/src/filters/components/Select/SelectFilterPlugin.test.tsx index c035f81c01b8..99e625987143 100644 --- a/superset-frontend/src/filters/components/Select/SelectFilterPlugin.test.tsx +++ b/superset-frontend/src/filters/components/Select/SelectFilterPlugin.test.tsx @@ -91,15 +91,6 @@ describe('SelectFilterPlugin', () => { test('Add multiple values with first render', async () => { getWrapper(); expect(setDataMask).toHaveBeenCalledWith({ - extraFormData: {}, - filterState: { - value: ['boy'], - }, - }); - expect(setDataMask).toHaveBeenCalledWith({ - __cache: { - value: ['boy'], - }, extraFormData: { filters: [ { @@ -118,9 +109,6 @@ describe('SelectFilterPlugin', () => { userEvent.click(screen.getByTitle('girl')); expect(await screen.findByTitle(/girl/i)).toBeInTheDocument(); expect(setDataMask).toHaveBeenCalledWith({ - __cache: { - value: ['boy'], - }, extraFormData: { filters: [ { @@ -146,9 +134,6 @@ describe('SelectFilterPlugin', () => { }), ); expect(setDataMask).toHaveBeenCalledWith({ - __cache: { - value: ['boy'], - }, extraFormData: { adhoc_filters: [ { @@ -174,9 +159,6 @@ describe('SelectFilterPlugin', () => { }), ); expect(setDataMask).toHaveBeenCalledWith({ - __cache: { - value: ['boy'], - }, extraFormData: {}, filterState: { label: undefined, @@ -191,9 +173,6 @@ describe('SelectFilterPlugin', () => { expect(await screen.findByTitle('girl')).toBeInTheDocument(); userEvent.click(screen.getByTitle('girl')); expect(setDataMask).toHaveBeenCalledWith({ - __cache: { - value: ['boy'], - }, extraFormData: { filters: [ { @@ -216,9 +195,6 @@ describe('SelectFilterPlugin', () => { expect(await screen.findByRole('combobox')).toBeInTheDocument(); userEvent.click(screen.getByTitle(NULL_STRING)); expect(setDataMask).toHaveBeenLastCalledWith({ - __cache: { - value: ['boy'], - }, extraFormData: { filters: [ { diff --git a/superset-frontend/src/filters/components/Select/SelectFilterPlugin.tsx b/superset-frontend/src/filters/components/Select/SelectFilterPlugin.tsx index 7d8ab55fb557..a4b9f5b05efa 100644 --- a/superset-frontend/src/filters/components/Select/SelectFilterPlugin.tsx +++ b/superset-frontend/src/filters/components/Select/SelectFilterPlugin.tsx @@ -37,7 +37,6 @@ import { Select } from 'src/components'; import { SLOW_DEBOUNCE } from 'src/constants'; import { hasOption, propertyComparator } from 'src/components/Select/utils'; import { FilterBarOrientation } from 'src/dashboard/types'; -import { uniqWith, isEqual } from 'lodash'; import { PluginFilterSelectProps, SelectValue } from './types'; import { FilterPluginStyle, StatusMessage, StyledFormItem } from '../common'; import { getDataRecordFormatter, getSelectExtraFormData } from '../../utils'; @@ -46,15 +45,11 @@ type DataMaskAction = | { type: 'ownState'; ownState: JsonObject } | { type: 'filterState'; - __cache: JsonObject; extraFormData: ExtraFormData; filterState: { value: SelectValue; label?: string }; }; -function reducer( - draft: DataMask & { __cache?: JsonObject }, - action: DataMaskAction, -) { +function reducer(draft: DataMask, action: DataMaskAction) { switch (action.type) { case 'ownState': draft.ownState = { @@ -63,10 +58,18 @@ function reducer( }; return draft; case 'filterState': - draft.extraFormData = action.extraFormData; - // eslint-disable-next-line no-underscore-dangle - draft.__cache = action.__cache; - draft.filterState = { ...draft.filterState, ...action.filterState }; + if ( + JSON.stringify(draft.extraFormData) 
!== + JSON.stringify(action.extraFormData) + ) { + draft.extraFormData = action.extraFormData; + } + if ( + JSON.stringify(draft.filterState) !== JSON.stringify(action.filterState) + ) { + draft.filterState = { ...draft.filterState, ...action.filterState }; + } + return draft; default: return draft; @@ -130,7 +133,6 @@ export default function PluginFilterSelect(props: PluginFilterSelectProps) { const suffix = inverseSelection && values?.length ? t(' (excluded)') : ''; dispatchDataMask({ type: 'filterState', - __cache: filterState, extraFormData: getSelectExtraFormData( col, values, @@ -219,16 +221,13 @@ export default function PluginFilterSelect(props: PluginFilterSelectProps) { }, [filterState.validateMessage, filterState.validateStatus]); const uniqueOptions = useMemo(() => { - const allOptions = [...data]; - return uniqWith(allOptions, isEqual).map(row => { - const [value] = groupby.map(col => row[col]); - return { - label: labelFormatter(value, datatype), - value, - isNewOption: false, - }; - }); - }, [data, datatype, groupby, labelFormatter]); + const allOptions = new Set([...data.map(el => el[col])]); + return [...allOptions].map((value: string) => ({ + label: labelFormatter(value, datatype), + value, + isNewOption: false, + })); + }, [data, datatype, col, labelFormatter]); const options = useMemo(() => { if (search && !multiSelect && !hasOption(search, uniqueOptions, true)) { diff --git a/superset-frontend/src/filters/components/common.ts b/superset-frontend/src/filters/components/common.ts index af1fe9c79176..cb6d7f22f14b 100644 --- a/superset-frontend/src/filters/components/common.ts +++ b/superset-frontend/src/filters/components/common.ts @@ -20,9 +20,11 @@ import { styled } from '@superset-ui/core'; import { PluginFilterStylesProps } from './types'; import FormItem from '../../components/Form/FormItem'; +export const RESPONSIVE_WIDTH = 0; + export const FilterPluginStyle = styled.div` min-height: ${({ height }) => height}px; - width: ${({ width }) => width}px; + width: ${({ width }) => (width === RESPONSIVE_WIDTH ? '100%' : `${width}px`)}; `; export const StyledFormItem = styled(FormItem)` diff --git a/superset-frontend/src/utils/errorMessages.ts b/superset-frontend/src/utils/errorMessages.ts index 16a04105c4c8..d5bfbdc17b80 100644 --- a/superset-frontend/src/utils/errorMessages.ts +++ b/superset-frontend/src/utils/errorMessages.ts @@ -16,6 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ + // Error messages used in many places across applications const COMMON_ERR_MESSAGES = { SESSION_TIMED_OUT: diff --git a/superset/common/query_context_factory.py b/superset/common/query_context_factory.py index 62e8b7989355..e4680ed5eda8 100644 --- a/superset/common/query_context_factory.py +++ b/superset/common/query_context_factory.py @@ -186,7 +186,6 @@ def _apply_granularity( filter for filter in query_object.filter if filter["col"] != filter_to_remove - or filter["op"] != "TEMPORAL_RANGE" ] def _apply_filters(self, query_object: QueryObject) -> None: diff --git a/superset/common/query_context_processor.py b/superset/common/query_context_processor.py index 754c9ae91a85..f6152b232a93 100644 --- a/superset/common/query_context_processor.py +++ b/superset/common/query_context_processor.py @@ -285,11 +285,10 @@ def _get_timestamp_format( datasource = self._qc_datasource labels = tuple( label - for label in { + for label in [ *get_base_axis_labels(query_object.columns), - *[col for col in query_object.columns or [] if isinstance(col, str)], query_object.granularity, - } + ] if datasource # Query datasource didn't support `get_column` and hasattr(datasource, "get_column") diff --git a/superset/common/query_object_factory.py b/superset/common/query_object_factory.py index a76431122e38..9246aed9e002 100644 --- a/superset/common/query_object_factory.py +++ b/superset/common/query_object_factory.py @@ -16,24 +16,26 @@ # under the License. from __future__ import annotations -from datetime import datetime -from typing import Any, TYPE_CHECKING +from typing import Any, cast, TYPE_CHECKING from superset.common.chart_data import ChartDataResultType from superset.common.query_object import QueryObject from superset.common.utils.time_range_utils import get_since_until_from_time_range +from superset.constants import NO_TIME_RANGE +from superset.superset_typing import Column from superset.utils.core import ( apply_max_row_limit, DatasourceDict, DatasourceType, FilterOperator, + get_xaxis_label, QueryObjectFilterClause, ) if TYPE_CHECKING: from sqlalchemy.orm import sessionmaker - from superset.connectors.base.models import BaseColumn, BaseDatasource + from superset.connectors.base.models import BaseDatasource from superset.daos.datasource import DatasourceDAO @@ -68,15 +70,14 @@ def create( # pylint: disable=too-many-arguments processed_extras = self._process_extras(extras) result_type = kwargs.setdefault("result_type", parent_result_type) row_limit = self._process_row_limit(row_limit, result_type) + processed_time_range = self._process_time_range( + time_range, kwargs.get("filters"), kwargs.get("columns") + ) from_dttm, to_dttm = get_since_until_from_time_range( - time_range, time_shift, processed_extras + processed_time_range, time_shift, processed_extras ) kwargs["from_dttm"] = from_dttm kwargs["to_dttm"] = to_dttm - if datasource_model_instance and kwargs.get("filters", []): - kwargs["filters"] = self._process_filters( - datasource_model_instance, kwargs["filters"] - ) return QueryObject( datasource=datasource_model_instance, extras=extras, @@ -110,58 +111,33 @@ def _process_row_limit( ) return apply_max_row_limit(row_limit or default_row_limit) + @staticmethod + def _process_time_range( + time_range: str | None, + filters: list[QueryObjectFilterClause] | None = None, + columns: list[Column] | None = None, + ) -> str: + if time_range is None: + time_range = NO_TIME_RANGE + temporal_flt = [ + flt + for flt in filters or [] + if flt.get("op") == FilterOperator.TEMPORAL_RANGE + ] + if temporal_flt: 
+ # Use the temporal filter as the time range. + # if the temporal filters uses x-axis as the temporal filter + # then use it or use the first temporal filter + xaxis_label = get_xaxis_label(columns or []) + match_flt = [ + flt for flt in temporal_flt if flt.get("col") == xaxis_label + ] + if match_flt: + time_range = cast(str, match_flt[0].get("val")) + else: + time_range = cast(str, temporal_flt[0].get("val")) + return time_range + # light version of the view.utils.core # import view.utils require application context # Todo: move it and the view.utils.core to utils package - - # pylint: disable=no-self-use - def _process_filters( - self, datasource: BaseDatasource, query_filters: list[QueryObjectFilterClause] - ) -> list[QueryObjectFilterClause]: - def get_dttm_filter_value( - value: Any, col: BaseColumn, date_format: str - ) -> int | str: - if not isinstance(value, int): - return value - if date_format in {"epoch_ms", "epoch_s"}: - if date_format == "epoch_s": - value = str(value) - else: - value = str(value * 1000) - else: - dttm = datetime.utcfromtimestamp(value / 1000) - value = dttm.strftime(date_format) - - if col.type in col.num_types: - value = int(value) - return value - - for query_filter in query_filters: - if query_filter.get("op") == FilterOperator.TEMPORAL_RANGE: - continue - filter_col = query_filter.get("col") - if not isinstance(filter_col, str): - continue - column = datasource.get_column(filter_col) - if not column: - continue - filter_value = query_filter.get("val") - - date_format = column.python_date_format - if not date_format and datasource.db_extra: - date_format = datasource.db_extra.get( - "python_date_format_by_column_name", {} - ).get(column.column_name) - - if column.is_dttm and date_format: - if isinstance(filter_value, list): - query_filter["val"] = [ - get_dttm_filter_value(value, column, date_format) - for value in filter_value - ] - else: - query_filter["val"] = get_dttm_filter_value( - filter_value, column, date_format - ) - - return query_filters diff --git a/superset/config.py b/superset/config.py index 27f78832d1e3..e15c7bf99042 100644 --- a/superset/config.py +++ b/superset/config.py @@ -186,7 +186,10 @@ def _try_json_readsha(filepath: str, length: int) -> str | None: SECRET_KEY = os.environ.get("SUPERSET_SECRET_KEY") or CHANGE_ME_SECRET_KEY # The SQLAlchemy connection string. 
-SQLALCHEMY_DATABASE_URI = "sqlite:///" + os.path.join(DATA_DIR, "superset.db") +SQLALCHEMY_DATABASE_URI = ( + f"""sqlite:///{os.path.join(DATA_DIR, "superset.db")}?check_same_thread=false""" +) + # SQLALCHEMY_DATABASE_URI = 'mysql://myapp@localhost/myapp' # SQLALCHEMY_DATABASE_URI = 'postgresql://root:password@localhost/myapp' @@ -1418,7 +1421,6 @@ def EMAIL_HEADER_MUTATOR( # pylint: disable=invalid-name,unused-argument "style-src": [ "'self'", "'unsafe-inline'", - "https://cdn.jsdelivr.net/npm/swagger-ui-dist@5/swagger-ui.css", ], "script-src": ["'self'", "'strict-dynamic'"], }, @@ -1440,7 +1442,6 @@ def EMAIL_HEADER_MUTATOR( # pylint: disable=invalid-name,unused-argument "style-src": [ "'self'", "'unsafe-inline'", - "https://cdn.jsdelivr.net/npm/swagger-ui-dist@5/swagger-ui.css", ], "script-src": ["'self'", "'unsafe-inline'", "'unsafe-eval'"], }, diff --git a/superset/connectors/base/models.py b/superset/connectors/base/models.py index 706c82635c1b..f560b4d86b7d 100644 --- a/superset/connectors/base/models.py +++ b/superset/connectors/base/models.py @@ -496,13 +496,6 @@ def query(self, query_obj: QueryObjectDict) -> QueryResult: """ raise NotImplementedError() - def values_for_column(self, column_name: str, limit: int = 10000) -> list[Any]: - """Given a column, returns an iterable of distinct values - - This is used to populate the dropdown showing a list of - values in filters in the explore view""" - raise NotImplementedError() - @staticmethod def default_query(qry: Query) -> Query: return qry diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py index 79203256f1e6..5edc724b23e8 100644 --- a/superset/connectors/sqla/models.py +++ b/superset/connectors/sqla/models.py @@ -46,7 +46,6 @@ inspect, Integer, or_, - select, String, Table, Text, @@ -789,34 +788,6 @@ def get_fetch_values_predicate( ) ) from ex - def values_for_column(self, column_name: str, limit: int = 10000) -> list[Any]: - """Runs query against sqla to retrieve some - sample values for the given column. 
- """ - cols = {col.column_name: col for col in self.columns} - target_col = cols[column_name] - tp = self.get_template_processor() - tbl, cte = self.get_from_clause(tp) - - qry = ( - select([target_col.get_sqla_col(template_processor=tp)]) - .select_from(tbl) - .distinct() - ) - if limit: - qry = qry.limit(limit) - - if self.fetch_values_predicate: - qry = qry.where(self.get_fetch_values_predicate(template_processor=tp)) - - with self.database.get_sqla_engine_with_context() as engine: - sql = qry.compile(engine, compile_kwargs={"literal_binds": True}) - sql = self._apply_cte(sql, cte) - sql = self.mutate_query_from_config(sql) - - df = pd.read_sql_query(sql=sql, con=engine) - return df[column_name].to_list() - def mutate_query_from_config(self, sql: str) -> str: """Apply config's SQL_QUERY_MUTATOR diff --git a/superset/daos/dashboard.py b/superset/daos/dashboard.py index f9544aa53d5d..8cbdfcd77ddc 100644 --- a/superset/daos/dashboard.py +++ b/superset/daos/dashboard.py @@ -68,8 +68,6 @@ def get_by_id_or_slug(cls, id_or_slug: int | str) -> Dashboard: query = ( db.session.query(Dashboard) .filter(id_or_slug_filter(id_or_slug)) - .outerjoin(Slice, Dashboard.slices) - .outerjoin(Slice.table) .outerjoin(Dashboard.owners) .outerjoin(Dashboard.roles) ) @@ -183,15 +181,6 @@ def validate_update_slug_uniqueness(dashboard_id: int, slug: str | None) -> bool return not db.session.query(dashboard_query.exists()).scalar() return True - @staticmethod - def update_charts_owners(model: Dashboard, commit: bool = True) -> Dashboard: - owners = list(model.owners) - for slc in model.slices: - slc.owners = list(set(owners) | set(slc.owners)) - if commit: - db.session.commit() - return model - @classmethod def delete(cls, items: Dashboard | list[Dashboard], commit: bool = True) -> None: item_ids = [item.id for item in get_iterable(items)] diff --git a/superset/daos/dataset.py b/superset/daos/dataset.py index 716fcd9a057a..0b6c4f627171 100644 --- a/superset/daos/dataset.py +++ b/superset/daos/dataset.py @@ -100,11 +100,15 @@ def validate_uniqueness( @staticmethod def validate_update_uniqueness( - database_id: int, dataset_id: int, name: str + database_id: int, + schema: str | None, + dataset_id: int, + name: str, ) -> bool: dataset_query = db.session.query(SqlaTable).filter( SqlaTable.table_name == name, SqlaTable.database_id == database_id, + SqlaTable.schema == schema, SqlaTable.id != dataset_id, ) return not db.session.query(dataset_query.exists()).scalar() diff --git a/superset/dashboards/commands/create.py b/superset/dashboards/commands/create.py index 98ecd6eb78e3..4477f046410b 100644 --- a/superset/dashboards/commands/create.py +++ b/superset/dashboards/commands/create.py @@ -40,8 +40,7 @@ def __init__(self, data: dict[str, Any]): def run(self) -> Model: self.validate() try: - dashboard = DashboardDAO.create(self._properties, commit=False) - dashboard = DashboardDAO.update_charts_owners(dashboard, commit=True) + dashboard = DashboardDAO.create(self._properties, commit=True) except DAOCreateFailedError as ex: logger.exception(ex.exception) raise DashboardCreateFailedError() from ex diff --git a/superset/dashboards/commands/update.py b/superset/dashboards/commands/update.py index c880eebe899e..f9975c0dd2f4 100644 --- a/superset/dashboards/commands/update.py +++ b/superset/dashboards/commands/update.py @@ -58,7 +58,6 @@ def run(self) -> Model: data=json.loads(self._properties.get("json_metadata", "{}")), commit=False, ) - dashboard = DashboardDAO.update_charts_owners(dashboard, commit=False) 
db.session.commit() except DAOUpdateFailedError as ex: logger.exception(ex.exception) diff --git a/superset/datasets/commands/update.py b/superset/datasets/commands/update.py index a38439fb7f23..dfa3a3dcf85c 100644 --- a/superset/datasets/commands/update.py +++ b/superset/datasets/commands/update.py @@ -89,7 +89,10 @@ def validate(self) -> None: table_name = self._properties.get("table_name", None) # Validate uniqueness if not DatasetDAO.validate_update_uniqueness( - self._model.database_id, self._model_id, table_name + self._model.database_id, + self._model.schema, + self._model_id, + table_name, ): exceptions.append(DatasetExistsValidationError(table_name)) # Validate/Populate database not allowed to change diff --git a/superset/datasource/api.py b/superset/datasource/api.py index 6399d197e004..213298d30b4b 100644 --- a/superset/datasource/api.py +++ b/superset/datasource/api.py @@ -120,6 +120,10 @@ def get_column_values( column_name=column_name, limit=row_limit ) return self.response(200, result=payload) + except KeyError: + return self.response( + 400, message=f"Column name {column_name} does not exist" + ) except NotImplementedError: return self.response( 400, diff --git a/superset/db_engine_specs/base.py b/superset/db_engine_specs/base.py index 5836e6163f8d..6be3ab24b0c1 100644 --- a/superset/db_engine_specs/base.py +++ b/superset/db_engine_specs/base.py @@ -1053,6 +1053,24 @@ def handle_cursor(cls, cursor: Any, query: Query, session: Session) -> None: query object""" # TODO: Fix circular import error caused by importing sql_lab.Query + @classmethod + def execute_with_cursor( + cls, cursor: Any, sql: str, query: Query, session: Session + ) -> None: + """ + Trigger execution of a query and handle the resulting cursor. + + For most implementations this just makes calls to `execute` and + `handle_cursor` consecutively, but in some engines (e.g. Trino) we may + need to handle client limitations such as lack of async support and + perform a more complicated operation to get information from the cursor + in a timely manner and facilitate operations such as query stop + """ + logger.debug("Query %d: Running query: %s", query.id, sql) + cls.execute(cursor, sql, async_=True) + logger.debug("Query %d: Handling cursor", query.id) + cls.handle_cursor(cursor, query, session) + @classmethod def extract_error_message(cls, ex: Exception) -> str: return f"{cls.engine} error: {cls._extract_error_message(ex)}" diff --git a/superset/db_engine_specs/trino.py b/superset/db_engine_specs/trino.py index eff78c4fa4eb..19c11939c440 100644 --- a/superset/db_engine_specs/trino.py +++ b/superset/db_engine_specs/trino.py @@ -17,6 +17,8 @@ from __future__ import annotations import logging +import threading +import time from typing import Any, TYPE_CHECKING import simplejson as json @@ -154,14 +156,21 @@ def get_tracking_url(cls, cursor: Cursor) -> str | None: @classmethod def handle_cursor(cls, cursor: Cursor, query: Query, session: Session) -> None: - if tracking_url := cls.get_tracking_url(cursor): - query.tracking_url = tracking_url + """ + Handle a trino client cursor. + + WARNING: if you execute a query, it will block until complete and you + will not be able to handle the cursor until complete. Use + `execute_with_cursor` instead, to handle this asynchronously. 
+ """ # Adds the executed query id to the extra payload so the query can be cancelled - query.set_extra_json_key( - key=QUERY_CANCEL_KEY, - value=(cancel_query_id := cursor.stats["queryId"]), - ) + cancel_query_id = cursor.query_id + logger.debug("Query %d: queryId %s found in cursor", query.id, cancel_query_id) + query.set_extra_json_key(key=QUERY_CANCEL_KEY, value=cancel_query_id) + + if tracking_url := cls.get_tracking_url(cursor): + query.tracking_url = tracking_url session.commit() @@ -176,6 +185,57 @@ def handle_cursor(cls, cursor: Cursor, query: Query, session: Session) -> None: super().handle_cursor(cursor=cursor, query=query, session=session) + @classmethod + def execute_with_cursor( + cls, cursor: Cursor, sql: str, query: Query, session: Session + ) -> None: + """ + Trigger execution of a query and handle the resulting cursor. + + Trino's client blocks until the query is complete, so we need to run it + in another thread and invoke `handle_cursor` to poll for the query ID + to appear on the cursor in parallel. + """ + # Fetch the query ID beforehand, since it might fail inside the thread due to + # how the SQLAlchemy session is handled. + query_id = query.id + + execute_result: dict[str, Any] = {} + execute_event = threading.Event() + + def _execute(results: dict[str, Any], event: threading.Event) -> None: + logger.debug("Query %d: Running query: %s", query_id, sql) + + try: + cls.execute(cursor, sql) + except Exception as ex: # pylint: disable=broad-except + results["error"] = ex + finally: + event.set() + + execute_thread = threading.Thread( + target=_execute, + args=(execute_result, execute_event), + ) + execute_thread.start() + + # Wait for a query ID to be available before handling the cursor, as + # it's required by that method; it may never become available on error. + while not cursor.query_id and not execute_event.is_set(): + time.sleep(0.1) + + logger.debug("Query %d: Handling cursor", query_id) + cls.handle_cursor(cursor, query, session) + + # Block until the query completes; same behaviour as the client itself + logger.debug("Query %d: Waiting for query to complete", query_id) + execute_event.wait() + + # Unfortunately we'll mangle the stack trace due to the thread, but + # throwing the original exception allows mapping database errors as normal + if err := execute_result.get("error"): + raise err + @classmethod def prepare_cancel_query(cls, query: Query, session: Session) -> None: if QUERY_CANCEL_KEY not in query.extra: @@ -183,7 +243,7 @@ def prepare_cancel_query(cls, query: Query, session: Session) -> None: session.commit() @classmethod - def cancel_query(cls, cursor: Any, query: Query, cancel_query_id: str) -> bool: + def cancel_query(cls, cursor: Cursor, query: Query, cancel_query_id: str) -> bool: """ Cancel query in the underlying database. diff --git a/superset/jinja_context.py b/superset/jinja_context.py index 4bb0b91a4e3d..a736b9278ec1 100644 --- a/superset/jinja_context.py +++ b/superset/jinja_context.py @@ -25,6 +25,7 @@ from jinja2 import DebugUndefined from jinja2.sandbox import SandboxedEnvironment from sqlalchemy.engine.interfaces import Dialect +from sqlalchemy.sql.expression import bindparam from sqlalchemy.types import String from typing_extensions import TypedDict @@ -397,23 +398,39 @@ def validate_template_context( return validate_context_types(context) -def where_in(values: list[Any], mark: str = "'") -> str: - """ - Given a list of values, build a parenthesis list suitable for an IN expression. 
+class WhereInMacro: # pylint: disable=too-few-public-methods + def __init__(self, dialect: Dialect): + self.dialect = dialect - >>> where_in([1, "b", 3]) - (1, 'b', 3) + def __call__(self, values: list[Any], mark: Optional[str] = None) -> str: + """ + Given a list of values, build a parenthesis list suitable for an IN expression. - """ + >>> from sqlalchemy.dialects import mysql + >>> where_in = WhereInMacro(dialect=mysql.dialect()) + >>> where_in([1, "Joe's", 3]) + (1, 'Joe''s', 3) - def quote(value: Any) -> str: - if isinstance(value, str): - value = value.replace(mark, mark * 2) - return f"{mark}{value}{mark}" - return str(value) + """ + binds = [bindparam(f"value_{i}", value) for i, value in enumerate(values)] + string_representations = [ + str( + bind.compile( + dialect=self.dialect, compile_kwargs={"literal_binds": True} + ) + ) + for bind in binds + ] + joined_values = ", ".join(string_representations) + result = f"({joined_values})" + + if mark: + result += ( + "\n-- WARNING: the `mark` parameter was removed from the `where_in` " + "macro for security reasons\n" + ) - joined_values = ", ".join(quote(value) for value in values) - return f"({joined_values})" + return result class BaseTemplateProcessor: @@ -449,7 +466,7 @@ def __init__( self.set_context(**kwargs) # custom filters - self._env.filters["where_in"] = where_in + self._env.filters["where_in"] = WhereInMacro(database.get_dialect()) def set_context(self, **kwargs: Any) -> None: self._context.update(kwargs) diff --git a/superset/models/core.py b/superset/models/core.py index 0581756b818e..51d5f8a162f2 100755 --- a/superset/models/core.py +++ b/superset/models/core.py @@ -185,6 +185,7 @@ class Database( "is_managed_externally", "external_url", "encrypted_extra", + "impersonate_user", ] export_children = ["tables"] diff --git a/superset/models/helpers.py b/superset/models/helpers.py index 38f29eb2234d..45d1b98406cc 100644 --- a/superset/models/helpers.py +++ b/superset/models/helpers.py @@ -700,10 +700,7 @@ class ExploreMixin: # pylint: disable=too-many-public-methods "MIN": sa.func.MIN, "MAX": sa.func.MAX, } - - @property - def fetch_value_predicate(self) -> str: - return "fix this!" + fetch_values_predicate = None @property def type(self) -> str: @@ -761,7 +758,7 @@ def db_engine_spec(self) -> builtins.type["BaseEngineSpec"]: raise NotImplementedError() @property - def database(self) -> builtins.type["Database"]: + def database(self) -> "Database": raise NotImplementedError() @property @@ -776,23 +773,26 @@ def sql(self) -> str: def columns(self) -> list[Any]: raise NotImplementedError() - def get_fetch_values_predicate( - self, template_processor: Optional[BaseTemplateProcessor] = None - ) -> TextClause: - raise NotImplementedError() - def get_extra_cache_keys(self, query_obj: dict[str, Any]) -> list[Hashable]: raise NotImplementedError() def get_template_processor(self, **kwargs: Any) -> BaseTemplateProcessor: raise NotImplementedError() + def get_fetch_values_predicate( + self, + template_processor: Optional[ # pylint: disable=unused-argument + BaseTemplateProcessor + ] = None, + ) -> TextClause: + return self.fetch_values_predicate + def get_sqla_row_level_filters( self, template_processor: BaseTemplateProcessor, ) -> list[TextClause]: """ - Return the appropriate row level security filters for this table and the + Returns the appropriate row level security filters for this table and the current user. A custom username can be passed when the user is not present in the Flask global namespace. 
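The WhereInMacro introduced in the jinja_context.py hunk above replaces hand-rolled string quoting with dialect-aware rendering: each value becomes a SQLAlchemy bind parameter compiled with literal_binds, so escaping rules come from the target dialect rather than from a caller-supplied quote mark. A minimal standalone sketch of the same idea, using only SQLAlchemy:

from sqlalchemy.dialects import postgresql
from sqlalchemy.sql.expression import bindparam

def where_in(values: list, dialect) -> str:
    # Render each value as a dialect-quoted literal via a bind parameter,
    # mirroring the WhereInMacro.__call__ implementation shown above.
    rendered = [
        str(
            bindparam(f"value_{i}", value).compile(
                dialect=dialect, compile_kwargs={"literal_binds": True}
            )
        )
        for i, value in enumerate(values)
    ]
    return f"({', '.join(rendered)})"

# The embedded quote is escaped by the dialect, not by naive replacement:
print(where_in([1, "Joe's", 3], postgresql.dialect()))  # (1, 'Joe''s', 3)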
@@ -896,7 +896,7 @@ def get_query_str_extended( self, query_obj: QueryObjectDict, mutate: bool = True ) -> QueryStringExtended: sqlaq = self.get_sqla_query(**query_obj) - sql = self.database.compile_sqla_query(sqlaq.sqla_query) # type: ignore + sql = self.database.compile_sqla_query(sqlaq.sqla_query) sql = self._apply_cte(sql, sqlaq.cte) sql = sqlparse.format(sql, reindent=True) if mutate: @@ -935,7 +935,7 @@ def _normalize_prequery_result_type( value = value.item() column_ = columns_by_name[dimension] - db_extra: dict[str, Any] = self.database.get_extra() # type: ignore + db_extra: dict[str, Any] = self.database.get_extra() if isinstance(column_, dict): if ( @@ -1020,9 +1020,7 @@ def assign_column_label(df: pd.DataFrame) -> Optional[pd.DataFrame]: return df try: - df = self.database.get_df( - sql, self.schema, mutator=assign_column_label # type: ignore - ) + df = self.database.get_df(sql, self.schema, mutator=assign_column_label) except Exception as ex: # pylint: disable=broad-except df = pd.DataFrame() status = QueryStatus.FAILED @@ -1334,36 +1332,34 @@ def get_time_filter( # pylint: disable=too-many-arguments return and_(*l) def values_for_column(self, column_name: str, limit: int = 10000) -> list[Any]: - """Runs query against sqla to retrieve some - sample values for the given column. - """ - cols = {} - for col in self.columns: - if isinstance(col, dict): - cols[col.get("column_name")] = col - else: - cols[col.column_name] = col - - target_col = cols[column_name] - tp = None # todo(hughhhh): add back self.get_template_processor() + # always denormalize column name before querying for values + db_dialect = self.database.get_dialect() + denormalized_col_name = self.database.db_engine_spec.denormalize_name( + db_dialect, column_name + ) + cols = {col.column_name: col for col in self.columns} + target_col = cols[denormalized_col_name] + tp = self.get_template_processor() tbl, cte = self.get_from_clause(tp) - if isinstance(target_col, dict): - sql_column = sa.column(target_col.get("name")) - else: - sql_column = target_col - - qry = sa.select([sql_column]).select_from(tbl).distinct() + qry = ( + sa.select([target_col.get_sqla_col(template_processor=tp)]) + .select_from(tbl) + .distinct() + ) if limit: qry = qry.limit(limit) - with self.database.get_sqla_engine_with_context() as engine: # type: ignore + if self.fetch_values_predicate: + qry = qry.where(self.get_fetch_values_predicate(template_processor=tp)) + + with self.database.get_sqla_engine_with_context() as engine: sql = qry.compile(engine, compile_kwargs={"literal_binds": True}) sql = self._apply_cte(sql, cte) sql = self.mutate_query_from_config(sql) df = pd.read_sql_query(sql=sql, con=engine) - return df[column_name].to_list() + return df[denormalized_col_name].to_list() def get_timestamp_expression( self, @@ -1935,7 +1931,7 @@ def get_sqla_query( # pylint: disable=too-many-arguments,too-many-locals,too-ma ) having_clause_and += [self.text(having)] - if apply_fetch_values_predicate and self.fetch_values_predicate: # type: ignore + if apply_fetch_values_predicate and self.fetch_values_predicate: qry = qry.where( self.get_fetch_values_predicate(template_processor=template_processor) ) diff --git a/superset/sql_lab.py b/superset/sql_lab.py index afc682b10fbc..ca157b324085 100644 --- a/superset/sql_lab.py +++ b/superset/sql_lab.py @@ -191,7 +191,7 @@ def get_sql_results( # pylint: disable=too-many-arguments return handle_query_error(ex, query, session) -def execute_sql_statement( # pylint: disable=too-many-arguments,too-many-statements 
+def execute_sql_statement( # pylint: disable=too-many-arguments sql_statement: str, query: Query, session: Session, @@ -271,10 +271,7 @@ def execute_sql_statement( # pylint: disable=too-many-arguments,too-many-statem ) session.commit() with stats_timing("sqllab.query.time_executing_query", stats_logger): - logger.debug("Query %d: Running query: %s", query.id, sql) - db_engine_spec.execute(cursor, sql, async_=True) - logger.debug("Query %d: Handling cursor", query.id) - db_engine_spec.handle_cursor(cursor, query, session) + db_engine_spec.execute_with_cursor(cursor, sql, query, session) with stats_timing("sqllab.query.time_fetching_results", stats_logger): logger.debug( diff --git a/superset/views/datasource/utils.py b/superset/views/datasource/utils.py index 65b19c34938f..e5294278982c 100644 --- a/superset/views/datasource/utils.py +++ b/superset/views/datasource/utils.py @@ -43,7 +43,7 @@ def get_limit_clause(page: Optional[int], per_page: Optional[int]) -> dict[str, return {"row_offset": offset, "row_limit": limit} -def get_samples( # pylint: disable=too-many-arguments,too-many-locals +def get_samples( # pylint: disable=too-many-arguments datasource_type: str, datasource_id: int, force: bool = False, @@ -104,21 +104,18 @@ def get_samples( # pylint: disable=too-many-arguments,too-many-locals result_type=ChartDataResultType.FULL, force=force, ) - samples_results = samples_instance.get_payload() - count_star_results = count_star_instance.get_payload() try: - sample_data = samples_results["queries"][0] - count_star_data = count_star_results["queries"][0] - failed_status = ( - sample_data.get("status") == QueryStatus.FAILED - or count_star_data.get("status") == QueryStatus.FAILED - ) - error_msg = sample_data.get("error") or count_star_data.get("error") - if failed_status and error_msg: - cache_key = sample_data.get("cache_key") - QueryCacheManager.delete(cache_key, region=CacheRegion.DATA) - raise DatasetSamplesFailedError(error_msg) + count_star_data = count_star_instance.get_payload()["queries"][0] + + if count_star_data.get("status") == QueryStatus.FAILED: + raise DatasetSamplesFailedError(count_star_data.get("error")) + + sample_data = samples_instance.get_payload()["queries"][0] + + if sample_data.get("status") == QueryStatus.FAILED: + QueryCacheManager.delete(sample_data.get("cache_key"), CacheRegion.DATA) + raise DatasetSamplesFailedError(sample_data.get("error")) sample_data["page"] = page sample_data["per_page"] = per_page diff --git a/tests/integration_tests/dashboards/api_tests.py b/tests/integration_tests/dashboards/api_tests.py index f676e873b700..cc7bc109b456 100644 --- a/tests/integration_tests/dashboards/api_tests.py +++ b/tests/integration_tests/dashboards/api_tests.py @@ -1378,55 +1378,6 @@ def test_dashboard_get_no_username(self): db.session.delete(model) db.session.commit() - @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") - def test_update_dashboard_chart_owners(self): - """ - Dashboard API: Test update chart owners - """ - user_alpha1 = self.create_user( - "alpha1", "password", "Alpha", email="alpha1@superset.org" - ) - user_alpha2 = self.create_user( - "alpha2", "password", "Alpha", email="alpha2@superset.org" - ) - admin = self.get_user("admin") - slices = [] - slices.append( - db.session.query(Slice).filter_by(slice_name="Girl Name Cloud").first() - ) - slices.append(db.session.query(Slice).filter_by(slice_name="Trends").first()) - slices.append(db.session.query(Slice).filter_by(slice_name="Boys").first()) - - dashboard = self.insert_dashboard( - 
"title1", - "slug1", - [admin.id], - slices=slices, - ) - self.login(username="admin") - uri = f"api/v1/dashboard/{dashboard.id}" - dashboard_data = {"owners": [user_alpha1.id, user_alpha2.id]} - rv = self.client.put(uri, json=dashboard_data) - self.assertEqual(rv.status_code, 200) - - # verify slices owners include alpha1 and alpha2 users - slices_ids = [slice.id for slice in slices] - # Refetch Slices - slices = db.session.query(Slice).filter(Slice.id.in_(slices_ids)).all() - for slice in slices: - self.assertIn(user_alpha1, slice.owners) - self.assertIn(user_alpha2, slice.owners) - self.assertNotIn(admin, slice.owners) - # Revert owners on slice - slice.owners = [] - db.session.commit() - - # Rollback changes - db.session.delete(dashboard) - db.session.delete(user_alpha1) - db.session.delete(user_alpha2) - db.session.commit() - @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") def test_update_dashboard_chart_owners_propagation(self): """ diff --git a/tests/integration_tests/query_context_tests.py b/tests/integration_tests/query_context_tests.py index 00a98b2c21d9..8c2082d1c4b1 100644 --- a/tests/integration_tests/query_context_tests.py +++ b/tests/integration_tests/query_context_tests.py @@ -836,9 +836,11 @@ def test_special_chars_in_column_name(app_context, physical_dataset): query_object = qc.queries[0] df = qc.get_df_payload(query_object)["df"] - - # sqlite doesn't have timestamp columns - if query_object.datasource.database.backend != "sqlite": + if query_object.datasource.database.backend == "sqlite": + # sqlite returns string as timestamp column + assert df["time column with spaces"][0] == "2002-01-03 00:00:00" + assert df["I_AM_A_TRUNC_COLUMN"][0] == "2002-01-01 00:00:00" + else: assert df["time column with spaces"][0].strftime("%Y-%m-%d") == "2002-01-03" assert df["I_AM_A_TRUNC_COLUMN"][0].strftime("%Y-%m-%d") == "2002-01-01" diff --git a/tests/unit_tests/common/test_process_time_range.py b/tests/unit_tests/common/test_process_time_range.py new file mode 100644 index 000000000000..12ee6d21aa3c --- /dev/null +++ b/tests/unit_tests/common/test_process_time_range.py @@ -0,0 +1,48 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from superset.common.query_object_factory import QueryObjectFactory
+from superset.constants import NO_TIME_RANGE
+
+
+def test_process_time_range():
+    """
+    Fall back to NO_TIME_RANGE when no time range is given
+    """
+    assert QueryObjectFactory._process_time_range(None) == NO_TIME_RANGE
+
+    """
+    Use the first temporal filter as the time range
+    """
+    filters = [
+        {"col": "dttm", "op": "TEMPORAL_RANGE", "val": "2001 : 2002"},
+        {"col": "dttm2", "op": "TEMPORAL_RANGE", "val": "2002 : 2003"},
+    ]
+    assert QueryObjectFactory._process_time_range(None, filters) == "2001 : 2002"
+
+    """
+    Use the BASE_AXIS temporal filter as the time range
+    """
+    columns = [
+        {
+            "columnType": "BASE_AXIS",
+            "label": "dttm2",
+            "sqlExpression": "dttm",
+        }
+    ]
+    assert (
+        QueryObjectFactory._process_time_range(None, filters, columns) == "2002 : 2003"
+    )
diff --git a/tests/unit_tests/common/test_query_object_factory.py b/tests/unit_tests/common/test_query_object_factory.py
index 4e8fadfe3e99..02304828dca8 100644
--- a/tests/unit_tests/common/test_query_object_factory.py
+++ b/tests/unit_tests/common/test_query_object_factory.py
@@ -43,45 +43,9 @@ def session_factory() -> Mock:
     return Mock()


-class SimpleDatasetColumn:
-    def __init__(self, col_params: dict[str, Any]):
-        self.__dict__.update(col_params)
-
-
-TEMPORAL_COLUMN_NAMES = ["temporal_column", "temporal_column_with_python_date_format"]
-TEMPORAL_COLUMNS = {
-    TEMPORAL_COLUMN_NAMES[0]: SimpleDatasetColumn(
-        {
-            "column_name": TEMPORAL_COLUMN_NAMES[0],
-            "is_dttm": True,
-            "python_date_format": None,
-            "type": "string",
-            "num_types": ["BIGINT"],
-        }
-    ),
-    TEMPORAL_COLUMN_NAMES[1]: SimpleDatasetColumn(
-        {
-            "column_name": TEMPORAL_COLUMN_NAMES[1],
-            "type": "BIGINT",
-            "is_dttm": True,
-            "python_date_format": "%Y",
-            "num_types": ["BIGINT"],
-        }
-    ),
-}
-
-
 @fixture
 def connector_registry() -> Mock:
-    datasource_dao_mock = Mock(spec=["get_datasource"])
-    datasource_dao_mock.get_datasource.return_value = Mock()
-    datasource_dao_mock.get_datasource().get_column = Mock(
-        side_effect=lambda col_name: TEMPORAL_COLUMNS[col_name]
-        if col_name in TEMPORAL_COLUMN_NAMES
-        else Mock()
-    )
-    datasource_dao_mock.get_datasource().db_extra = None
-    return datasource_dao_mock
+    return Mock(spec=["get_datasource"])


 def apply_max_row_limit(limit: int, max_limit: Optional[int] = None) -> int:
@@ -148,55 +112,3 @@ def test_query_context_null_post_processing_op(
             raw_query_context["result_type"], **raw_query_object
         )
         assert query_object.post_processing == []
-
-    def test_query_context_no_python_date_format_filters(
-        self,
-        query_object_factory: QueryObjectFactory,
-        raw_query_context: dict[str, Any],
-    ):
-        raw_query_object = raw_query_context["queries"][0]
-        raw_query_object["filters"].append(
-            {"col": TEMPORAL_COLUMN_NAMES[0], "op": "==", "val": 315532800000}
-        )
-        query_object = query_object_factory.create(
-            raw_query_context["result_type"],
-            raw_query_context["datasource"],
-            **raw_query_object
-        )
-        assert query_object.filter[3]["val"] == 315532800000
-
-    def test_query_context_python_date_format_filters(
-        self,
-        query_object_factory: QueryObjectFactory,
-        raw_query_context: dict[str, Any],
-    ):
-        raw_query_object = raw_query_context["queries"][0]
-        raw_query_object["filters"].append(
-            {"col": TEMPORAL_COLUMN_NAMES[1], "op": "==", "val": 315532800000}
-        )
-        query_object = query_object_factory.create(
-            raw_query_context["result_type"],
-            raw_query_context["datasource"],
-            **raw_query_object
-        )
-        assert query_object.filter[3]["val"] == 1980
-
-    def test_query_context_python_date_format_filters_list_of_values(
-        self,
-        query_object_factory: QueryObjectFactory,
-        raw_query_context: dict[str, Any],
-    ):
-        raw_query_object = raw_query_context["queries"][0]
-        raw_query_object["filters"].append(
-            {
-                "col": TEMPORAL_COLUMN_NAMES[1],
-                "op": "==",
-                "val": [315532800000, 631152000000],
-            }
-        )
-        query_object = query_object_factory.create(
-            raw_query_context["result_type"],
-            raw_query_context["datasource"],
-            **raw_query_object
-        )
-        assert query_object.filter[3]["val"] == [1980, 1990]
diff --git a/tests/unit_tests/dao/dataset_test.py b/tests/unit_tests/dao/dataset_test.py
new file mode 100644
index 000000000000..288f68cae026
--- /dev/null
+++ b/tests/unit_tests/dao/dataset_test.py
@@ -0,0 +1,83 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from sqlalchemy.orm.session import Session
+
+from superset.daos.dataset import DatasetDAO
+
+
+def test_validate_update_uniqueness(session: Session) -> None:
+    """
+    Test the `validate_update_uniqueness` static method.
+
+    In particular, allow datasets with the same name in the same database as long as they
+    are in different schemas.
+    """
+    from superset.connectors.sqla.models import SqlaTable
+    from superset.models.core import Database
+
+    SqlaTable.metadata.create_all(session.get_bind())
+
+    database = Database(
+        database_name="my_db",
+        sqlalchemy_uri="sqlite://",
+    )
+    dataset1 = SqlaTable(
+        table_name="my_dataset",
+        schema="main",
+        database=database,
+    )
+    dataset2 = SqlaTable(
+        table_name="my_dataset",
+        schema="dev",
+        database=database,
+    )
+    session.add_all([database, dataset1, dataset2])
+    session.flush()
+
+    # same table name, different schema
+    assert (
+        DatasetDAO.validate_update_uniqueness(
+            database_id=database.id,
+            schema=dataset1.schema,
+            dataset_id=dataset1.id,
+            name=dataset1.table_name,
+        )
+        is True
+    )
+
+    # duplicate schema and table name
+    assert (
+        DatasetDAO.validate_update_uniqueness(
+            database_id=database.id,
+            schema=dataset2.schema,
+            dataset_id=dataset1.id,
+            name=dataset1.table_name,
+        )
+        is False
+    )
+
+    # no schema
+    assert (
+        DatasetDAO.validate_update_uniqueness(
+            database_id=database.id,
+            schema=None,
+            dataset_id=dataset1.id,
+            name=dataset1.table_name,
+        )
+        is True
+    )
diff --git a/tests/unit_tests/db_engine_specs/test_trino.py b/tests/unit_tests/db_engine_specs/test_trino.py
index 963953d18b48..1b50a683a084 100644
--- a/tests/unit_tests/db_engine_specs/test_trino.py
+++ b/tests/unit_tests/db_engine_specs/test_trino.py
@@ -352,7 +352,7 @@ def test_handle_cursor_early_cancel(
     query_id = "myQueryId"

     cursor_mock = engine_mock.return_value.__enter__.return_value
-    cursor_mock.stats = {"queryId": query_id}
+    cursor_mock.query_id = query_id
     session_mock = mocker.MagicMock()

     query = Query()
@@ -366,3 +366,32 @@ def test_handle_cursor_early_cancel(
         assert cancel_query_mock.call_args[1]["cancel_query_id"] == query_id
     else:
         assert cancel_query_mock.call_args is None
+
+
+def test_execute_with_cursor_in_parallel(mocker: MockerFixture):
+    """Test that `execute_with_cursor` fetches the query ID from the cursor"""
+    from superset.db_engine_specs.trino import TrinoEngineSpec
+
+    query_id = "myQueryId"
+
+    mock_cursor = mocker.MagicMock()
+    mock_cursor.query_id = None
+
+    mock_query = mocker.MagicMock()
+    mock_session = mocker.MagicMock()
+
+    def _mock_execute(*args, **kwargs):
+        mock_cursor.query_id = query_id
+
+    mock_cursor.execute.side_effect = _mock_execute
+
+    TrinoEngineSpec.execute_with_cursor(
+        cursor=mock_cursor,
+        sql="SELECT 1 FROM foo",
+        query=mock_query,
+        session=mock_session,
+    )
+
+    mock_query.set_extra_json_key.assert_called_once_with(
+        key=QUERY_CANCEL_KEY, value=query_id
+    )
diff --git a/tests/unit_tests/jinja_context_test.py b/tests/unit_tests/jinja_context_test.py
index fe4b144d2fd7..114f04630016 100644
--- a/tests/unit_tests/jinja_context_test.py
+++ b/tests/unit_tests/jinja_context_test.py
@@ -20,17 +20,22 @@
 import pytest
 from pytest_mock import MockFixture
+from sqlalchemy.dialects import mysql

 from superset.datasets.commands.exceptions import DatasetNotFoundError
-from superset.jinja_context import dataset_macro, where_in
+from superset.jinja_context import dataset_macro, WhereInMacro


 def test_where_in() -> None:
     """
     Test the ``where_in`` Jinja2 filter.
""" + where_in = WhereInMacro(mysql.dialect()) assert where_in([1, "b", 3]) == "(1, 'b', 3)" - assert where_in([1, "b", 3], '"') == '(1, "b", 3)' + assert where_in([1, "b", 3], '"') == ( + "(1, 'b', 3)\n-- WARNING: the `mark` parameter was removed from the " + "`where_in` macro for security reasons\n" + ) assert where_in(["O'Malley's"]) == "('O''Malley''s')" diff --git a/tests/unit_tests/sql_lab_test.py b/tests/unit_tests/sql_lab_test.py index 29f45eab682a..edc1fd2ec4a5 100644 --- a/tests/unit_tests/sql_lab_test.py +++ b/tests/unit_tests/sql_lab_test.py @@ -55,8 +55,8 @@ def test_execute_sql_statement(mocker: MockerFixture, app: None) -> None: ) database.apply_limit_to_sql.assert_called_with("SELECT 42 AS answer", 2, force=True) - db_engine_spec.execute.assert_called_with( - cursor, "SELECT 42 AS answer LIMIT 2", async_=True + db_engine_spec.execute_with_cursor.assert_called_with( + cursor, "SELECT 42 AS answer LIMIT 2", query, session ) SupersetResultSet.assert_called_with([(42,)], cursor.description, db_engine_spec) @@ -106,10 +106,8 @@ def test_execute_sql_statement_with_rls( 101, force=True, ) - db_engine_spec.execute.assert_called_with( - cursor, - "SELECT * FROM sales WHERE organization_id=42 LIMIT 101", - async_=True, + db_engine_spec.execute_with_cursor.assert_called_with( + cursor, "SELECT * FROM sales WHERE organization_id=42 LIMIT 101", query, session ) SupersetResultSet.assert_called_with([(42,)], cursor.description, db_engine_spec)