chore: Migrate warm up cache endpoint to api v1 (apache#23853)
jfrag1 authored Jun 20, 2023
1 parent 3e76736 commit 5af298e
Showing 14 changed files with 704 additions and 66 deletions.
61 changes: 61 additions & 0 deletions superset/charts/api.py
@@ -47,6 +47,7 @@
from superset.charts.commands.export import ExportChartsCommand
from superset.charts.commands.importers.dispatcher import ImportChartsCommand
from superset.charts.commands.update import UpdateChartCommand
from superset.charts.commands.warm_up_cache import ChartWarmUpCacheCommand
from superset.charts.filters import (
ChartAllTextFilter,
ChartCertifiedFilter,
@@ -59,6 +60,7 @@
)
from superset.charts.schemas import (
CHART_SCHEMAS,
ChartCacheWarmUpRequestSchema,
ChartPostSchema,
ChartPutSchema,
get_delete_ids_schema,
@@ -68,6 +70,7 @@
screenshot_query_schema,
thumbnail_query_schema,
)
from superset.commands.exceptions import CommandException
from superset.commands.importers.exceptions import (
IncorrectFormatError,
NoValidFilesFoundError,
@@ -118,6 +121,7 @@ def ensure_thumbnails_enabled(self) -> Optional[Response]:
"thumbnail",
"screenshot",
"cache_screenshot",
"warm_up_cache",
}
class_permission_name = "Chart"
method_permission_name = MODEL_API_RW_METHOD_PERMISSION_MAP
@@ -942,6 +946,63 @@ def remove_favorite(self, pk: int) -> Response:
ChartDAO.remove_favorite(chart)
return self.response(200, result="OK")

@expose("/warm_up_cache", methods=("PUT",))
@protect()
@safe
@statsd_metrics
@event_logger.log_this_with_context(
action=lambda self, *args, **kwargs: f"{self.__class__.__name__}"
f".warm_up_cache",
log_to_statsd=False,
)
def warm_up_cache(self) -> Response:
"""
---
put:
summary: >-
Warms up the cache for the chart
description: >-
Warms up the cache for the chart.
Note for slices a force refresh occurs.
In terms of the `extra_filters` these can be obtained from records in the JSON
encoded `logs.json` column associated with the `explore_json` action.
requestBody:
description: >-
Identifies the chart to warm up cache for, and any additional dashboard or
filter context to use.
required: true
content:
application/json:
schema:
$ref: "#/components/schemas/ChartCacheWarmUpRequestSchema"
responses:
200:
description: Each chart's warmup status
content:
application/json:
schema:
$ref: "#/components/schemas/ChartCacheWarmUpResponseSchema"
400:
$ref: '#/components/responses/400'
404:
$ref: '#/components/responses/404'
500:
$ref: '#/components/responses/500'
"""
try:
body = ChartCacheWarmUpRequestSchema().load(request.json)
except ValidationError as error:
return self.response_400(message=error.messages)
try:
result = ChartWarmUpCacheCommand(
body["chart_id"],
body.get("dashboard_id"),
body.get("extra_filters"),
).run()
return self.response(200, result=[result])
except CommandException as ex:
return self.response(ex.status, message=ex.message)

@expose("/import/", methods=("POST",))
@protect()
@statsd_metrics
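
For reference, a client could call the new chart endpoint roughly as follows. This is a minimal sketch, not part of the commit: the host, token, and chart/dashboard ids are placeholders, and the `/api/v1/chart/warm_up_cache` path is inferred from the `ChartRestApi` resource and the `@expose` route above.

import requests

SUPERSET_URL = "http://localhost:8088"  # assumption: local dev instance
ACCESS_TOKEN = "<JWT from /api/v1/security/login>"  # placeholder

response = requests.put(
    f"{SUPERSET_URL}/api/v1/chart/warm_up_cache",
    json={
        "chart_id": 123,      # required: chart to warm up
        "dashboard_id": 45,   # optional: reuse that dashboard's filters
    },
    headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
)
print(response.json())
# a 200 response looks like:
# {"result": [{"chart_id": 123, "viz_error": None, "viz_status": "success"}]}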
5 changes: 5 additions & 0 deletions superset/charts/commands/exceptions.py
@@ -153,3 +153,8 @@ class ChartBulkDeleteFailedReportsExistError(ChartBulkDeleteFailedError):

class ChartImportError(ImportFailedError):
message = _("Import chart failed for an unknown reason")


class WarmUpCacheChartNotFoundError(CommandException):
status = 404
message = _("Chart not found")
84 changes: 84 additions & 0 deletions superset/charts/commands/warm_up_cache.py
@@ -0,0 +1,84 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.


from typing import Any, Optional, Union

import simplejson as json
from flask import g

from superset.charts.commands.exceptions import WarmUpCacheChartNotFoundError
from superset.commands.base import BaseCommand
from superset.extensions import db
from superset.models.slice import Slice
from superset.utils.core import error_msg_from_exception
from superset.views.utils import get_dashboard_extra_filters, get_form_data, get_viz


class ChartWarmUpCacheCommand(BaseCommand):
# pylint: disable=too-many-arguments
def __init__(
self,
chart_or_id: Union[int, Slice],
dashboard_id: Optional[int],
extra_filters: Optional[str],
):
self._chart_or_id = chart_or_id
self._dashboard_id = dashboard_id
self._extra_filters = extra_filters

def run(self) -> dict[str, Any]:
self.validate()
chart: Slice = self._chart_or_id # type: ignore
try:
form_data = get_form_data(chart.id, use_slice_data=True)[0]
if self._dashboard_id:
form_data["extra_filters"] = (
json.loads(self._extra_filters)
if self._extra_filters
else get_dashboard_extra_filters(chart.id, self._dashboard_id)
)

if not chart.datasource:
raise Exception("Chart's datasource does not exist")

obj = get_viz(
datasource_type=chart.datasource.type,
datasource_id=chart.datasource.id,
form_data=form_data,
force=True,
)

# pylint: disable=assigning-non-slot
g.form_data = form_data
payload = obj.get_payload()
delattr(g, "form_data")
error = payload["errors"] or None
status = payload["status"]
except Exception as ex: # pylint: disable=broad-except
error = error_msg_from_exception(ex)
status = None

return {"chart_id": chart.id, "viz_error": error, "viz_status": status}

def validate(self) -> None:
if isinstance(self._chart_or_id, Slice):
return
chart = db.session.query(Slice).filter_by(id=self._chart_or_id).scalar()
if not chart:
raise WarmUpCacheChartNotFoundError()
self._chart_or_id = chart
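
The command can also be invoked outside the HTTP layer (for example from a Celery task or a `superset shell` session); a minimal sketch, assuming a running Flask application context so that `g` and the SQLAlchemy session are available, and a hypothetical chart id of 123:

from superset.charts.commands.warm_up_cache import ChartWarmUpCacheCommand

# chart_or_id accepts either a Slice instance or a chart id;
# dashboard_id and extra_filters may be None.
result = ChartWarmUpCacheCommand(123, None, None).run()
# e.g. {"chart_id": 123, "viz_error": None, "viz_status": "success"}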
38 changes: 38 additions & 0 deletions superset/charts/schemas.py
@@ -1557,7 +1557,45 @@ class ImportV1ChartSchema(Schema):
external_url = fields.String(allow_none=True)


class ChartCacheWarmUpRequestSchema(Schema):
chart_id = fields.Integer(
required=True,
metadata={"description": "The ID of the chart to warm up cache for"},
)
dashboard_id = fields.Integer(
metadata={
"description": "The ID of the dashboard to get filters for when warming cache"
}
)
extra_filters = fields.String(
metadata={"description": "Extra filters to apply when warming up cache"}
)


class ChartCacheWarmUpResponseSingleSchema(Schema):
chart_id = fields.Integer(
metadata={"description": "The ID of the chart the status belongs to"}
)
viz_error = fields.String(
metadata={"description": "Error that occurred when warming cache for chart"}
)
viz_status = fields.String(
metadata={"description": "Status of the underlying query for the viz"}
)


class ChartCacheWarmUpResponseSchema(Schema):
result = fields.List(
fields.Nested(ChartCacheWarmUpResponseSingleSchema),
metadata={
"description": "A list of each chart's warmup status and errors if any"
},
)


CHART_SCHEMAS = (
ChartCacheWarmUpRequestSchema,
ChartCacheWarmUpResponseSchema,
ChartDataQueryContextSchema,
ChartDataResponseSchema,
ChartDataAsyncResponseSchema,
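
A quick sketch of how the new request schema behaves under marshmallow validation (the payload values are made up; the behavior follows from `required=True` on `chart_id`):

from marshmallow import ValidationError

from superset.charts.schemas import ChartCacheWarmUpRequestSchema

schema = ChartCacheWarmUpRequestSchema()

print(schema.load({"chart_id": 123, "dashboard_id": 45}))
# {'chart_id': 123, 'dashboard_id': 45}

try:
    schema.load({"dashboard_id": 45})  # chart_id is required
except ValidationError as err:
    print(err.messages)
    # {'chart_id': ['Missing data for required field.']}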
66 changes: 66 additions & 0 deletions superset/datasets/api.py
@@ -14,6 +14,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=too-many-lines
import json
import logging
from datetime import datetime
@@ -29,6 +30,7 @@
from marshmallow import ValidationError

from superset import event_logger, is_feature_enabled
from superset.commands.exceptions import CommandException
from superset.commands.importers.exceptions import NoValidFilesFoundError
from superset.commands.importers.v1.utils import get_contents_from_bundle
from superset.connectors.sqla.models import SqlaTable
@@ -53,8 +55,11 @@
from superset.datasets.commands.importers.dispatcher import ImportDatasetsCommand
from superset.datasets.commands.refresh import RefreshDatasetCommand
from superset.datasets.commands.update import UpdateDatasetCommand
from superset.datasets.commands.warm_up_cache import DatasetWarmUpCacheCommand
from superset.datasets.filters import DatasetCertifiedFilter, DatasetIsNullOrEmptyFilter
from superset.datasets.schemas import (
DatasetCacheWarmUpRequestSchema,
DatasetCacheWarmUpResponseSchema,
DatasetDuplicateSchema,
DatasetPostSchema,
DatasetPutSchema,
@@ -95,6 +100,7 @@ class DatasetRestApi(BaseSupersetModelRestApi):
"related_objects",
"duplicate",
"get_or_create_dataset",
"warm_up_cache",
}
list_columns = [
"id",
@@ -244,6 +250,8 @@ class DatasetRestApi(BaseSupersetModelRestApi):
"get_export_ids_schema": get_export_ids_schema,
}
openapi_spec_component_schemas = (
DatasetCacheWarmUpRequestSchema,
DatasetCacheWarmUpResponseSchema,
DatasetRelatedObjectsResponse,
DatasetDuplicateSchema,
GetOrCreateDatasetSchema,
@@ -992,3 +1000,61 @@ def get_or_create_dataset(self) -> Response:
exc_info=True,
)
return self.response_422(message=ex.message)

@expose("/warm_up_cache", methods=("PUT",))
@protect()
@safe
@statsd_metrics
@event_logger.log_this_with_context(
action=lambda self, *args, **kwargs: f"{self.__class__.__name__}"
f".warm_up_cache",
log_to_statsd=False,
)
def warm_up_cache(self) -> Response:
"""
---
put:
summary: >-
Warms up the cache for each chart powered by the given table
description: >-
Warms up the cache for the table.
Note for slices a force refresh occurs.
In terms of the `extra_filters` these can be obtained from records in the JSON
encoded `logs.json` column associated with the `explore_json` action.
requestBody:
description: >-
Identifies the database and table to warm up cache for, and any
additional dashboard or filter context to use.
required: true
content:
application/json:
schema:
$ref: "#/components/schemas/DatasetCacheWarmUpRequestSchema"
responses:
200:
description: Each chart's warmup status
content:
application/json:
schema:
$ref: "#/components/schemas/DatasetCacheWarmUpResponseSchema"
400:
$ref: '#/components/responses/400'
404:
$ref: '#/components/responses/404'
500:
$ref: '#/components/responses/500'
"""
try:
body = DatasetCacheWarmUpRequestSchema().load(request.json)
except ValidationError as error:
return self.response_400(message=error.messages)
try:
result = DatasetWarmUpCacheCommand(
body["db_name"],
body["table_name"],
body.get("dashboard_id"),
body.get("extra_filters"),
).run()
return self.response(200, result=result)
except CommandException as ex:
return self.response(ex.status, message=ex.message)
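
The dataset variant identifies the table by name rather than by chart id; a comparable sketch under the same assumptions as the chart example above (host, token, and values are placeholders; the route is inferred from `DatasetRestApi`):

import requests

response = requests.put(
    "http://localhost:8088/api/v1/dataset/warm_up_cache",  # assumed route
    json={
        "db_name": "examples",        # required: database the table belongs to
        "table_name": "birth_names",  # required: every chart on this table is warmed
        "dashboard_id": 45,           # optional, as in the chart endpoint
    },
    headers={"Authorization": "Bearer <access token>"},
)
print(response.json())
# the result list holds one entry per chart built on the table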
5 changes: 5 additions & 0 deletions superset/datasets/commands/exceptions.py
@@ -212,3 +212,8 @@ class DatasetDuplicateFailedError(CreateFailedError):

class DatasetForbiddenDataURI(ImportFailedError):
message = _("Data URI is not allowed.")


class WarmUpCacheTableNotFoundError(CommandException):
status = 404
message = _("The provided table was not found in the provided database")