This is an automated email from the ASF dual-hosted git repository.

beto pushed a commit to branch dbt-metricflow
in repository https://gitbox.apache.org/repos/asf/superset.git


The following commit(s) were added to refs/heads/dbt-metricflow by this push:
     new 8f6ae15dfb DAO and API
8f6ae15dfb is described below

commit 8f6ae15dfb90fbd7b45dcad0543c8e26a7bae22d
Author: Beto Dealmeida <robe...@dealmeida.net>
AuthorDate: Thu Jul 17 08:19:42 2025 -0400

    DAO and API
---
 superset/constants.py                  |  1 +
 superset/daos/database.py              | 34 +++++++++++++++++
 superset/databases/api.py              | 69 ++++++++++++++++++++++++++++++++++
 superset/databases/schemas.py          | 30 +++++++++++++++
 superset/db_engine_specs/base.py       | 10 ++---
 superset/db_engine_specs/metricflow.py |  8 ++--
 6 files changed, 143 insertions(+), 9 deletions(-)

diff --git a/superset/constants.py b/superset/constants.py
index f60b79a961..f971c66c55 100644
--- a/superset/constants.py
+++ b/superset/constants.py
@@ -174,6 +174,7 @@ MODEL_API_RW_METHOD_PERMISSION_MAP = {
     "put_filters": "write",
     "put_colors": "write",
     "sync_permissions": "write",
+    "valid_metrics_and_dimensions": "read",
 }
 
 EXTRA_FORM_DATA_APPEND_KEYS = {
diff --git a/superset/daos/database.py b/superset/daos/database.py
index fa035534ee..d833012b0d 100644
--- a/superset/daos/database.py
+++ b/superset/daos/database.py
@@ -23,6 +23,7 @@ from superset.connectors.sqla.models import SqlaTable
 from superset.daos.base import BaseDAO
 from superset.databases.filters import DatabaseFilter
 from superset.databases.ssh_tunnel.models import SSHTunnel
+from superset.db_engine_specs.base import ValidColumnsType
 from superset.extensions import db
 from superset.models.core import Database, DatabaseUserOAuth2Tokens
 from superset.models.dashboard import Dashboard
@@ -166,6 +167,39 @@ class DatabaseDAO(BaseDAO[Database]):
 
         return ssh_tunnel
 
+    @classmethod
+    def get_valid_metrics_and_dimensions(
+        cls,
+        database_id: int,
+        datasource_id: int,
+        dimensions: set[str],
+        metrics: set[str],
+    ) -> ValidColumnsType:
+        """
+        Get valid metrics and dimensions for a datasource using the database 
engine spec.
+
+        :param database_id: The database ID
+        :param datasource_id: The datasource ID
+        :param dimensions: Set of selected column names
+        :param metrics: Set of selected metric names
+        :return: Dictionary with 'dimensions' and 'metrics' keys containing 
valid sets
+        :raises ValueError: If database or datasource not found, or invalid 
type
+        """
+        database = cls.find_by_id(database_id)
+        if not database:
+            raise ValueError(f"Database with id {database_id} not found")
+
+        datasource = db.session.query(SqlaTable).get(datasource_id)
+        if not datasource:
+            raise ValueError(f"Table with id {datasource_id} not found")
+
+        return database.db_engine_spec.get_valid_metrics_and_dimensions(
+            database,
+            datasource,
+            dimensions,
+            metrics,
+        )
+
 
 class SSHTunnelDAO(BaseDAO[SSHTunnel]):
     @classmethod
diff --git a/superset/databases/api.py b/superset/databases/api.py
index c9b882dd6b..42e58c55b4 100644
--- a/superset/databases/api.py
+++ b/superset/databases/api.py
@@ -103,6 +103,7 @@ from superset.databases.schemas import (
     UploadPostSchema,
     ValidateSQLRequest,
     ValidateSQLResponse,
+    ValidMetricsAndDimensionsRequestSchema,
 )
 from superset.databases.utils import get_table_metadata
 from superset.db_engine_specs import get_available_engine_specs
@@ -164,6 +165,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
         "available",
         "validate_parameters",
         "validate_sql",
+        "valid_metrics_and_dimensions",
         "delete_ssh_tunnel",
         "schemas_access_for_file_upload",
         "get_connection",
@@ -2098,3 +2100,70 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
             database, database.get_default_catalog(), schemas_allowed, True
         )
         return self.response(200, schemas=schemas_allowed_processed)
+
+    @expose("/<int:pk>/valid_metrics_and_dimensions/", methods=("POST",))
+    @protect()
+    @statsd_metrics
+    @event_logger.log_this_with_context(
+        action=lambda self, *args, **kwargs: f"{self.__class__.__name__}"
+        f".valid_metrics_and_dimensions",
+        log_to_statsd=False,
+    )
+    @requires_json
+    def valid_metrics_and_dimensions(self, pk: int) -> FlaskResponse:
+        """Get valid metrics and dimensions for a datasource.
+        ---
+        post:
+          summary: Get valid metrics and dimensions for a datasource
+          parameters:
+          - in: path
+            schema:
+              type: integer
+            name: pk
+            description: The database ID
+          requestBody:
+            description: Valid metrics and dimensions request
+            required: true
+            content:
+              application/json:
+                schema:
+                  $ref: "#/components/schemas/ValidMetricsAndDimensionsRequestSchema"
+          responses:
+            200:
+              description: Valid metrics and dimensions
+              content:
+                application/json:
+                  schema:
+                    $ref: "#/components/schemas/ValidMetricsAndDimensionsResponseSchema"
+            400:
+              $ref: '#/components/responses/400'
+            401:
+              $ref: '#/components/responses/401'
+            404:
+              $ref: '#/components/responses/404'
+            500:
+              $ref: '#/components/responses/500'
+        """
+        request_schema = ValidMetricsAndDimensionsRequestSchema()
+        try:
+            item = request_schema.load(request.json)
+        except ValidationError as error:
+            return self.response_400(message=error.messages)
+
+        datasource_id = item["datasource_id"]
+        dimensions = set(item["dimensions"])
+        metrics = set(item["metrics"])
+
+        result = DatabaseDAO.get_valid_metrics_and_dimensions(
+            pk,
+            datasource_id,
+            dimensions,
+            metrics,
+        )
+
+        response_data = {
+            "dimensions": list(result["dimensions"]),
+            "metrics": list(result["metrics"]),
+        }
+
+        return self.response(200, **response_data)
diff --git a/superset/databases/schemas.py b/superset/databases/schemas.py
index b91dbd25d5..f1efbfc7d4 100644
--- a/superset/databases/schemas.py
+++ b/superset/databases/schemas.py
@@ -1341,3 +1341,33 @@ class QualifiedTableSchema(Schema):
         load_default=None,
         metadata={"description": "The table catalog"},
     )
+
+
+class ValidMetricsAndDimensionsRequestSchema(Schema):
+    datasource_id = fields.Integer(
+        required=True,
+        metadata={"description": "The datasource ID"},
+    )
+    dimensions = fields.List(
+        fields.String(),
+        required=True,
+        missing=[],
+        metadata={"description": "List of selected dimension names"},
+    )
+    metrics = fields.List(
+        fields.String(),
+        required=True,
+        missing=[],
+        metadata={"description": "List of selected metric names"},
+    )
+
+
+class ValidMetricsAndDimensionsResponseSchema(Schema):
+    dimensions = fields.List(
+        fields.String(),
+        metadata={"description": "List of valid dimension names"},
+    )
+    metrics = fields.List(
+        fields.String(),
+        metadata={"description": "List of valid metric names"},
+    )
diff --git a/superset/db_engine_specs/base.py b/superset/db_engine_specs/base.py
index 4014f1b8dc..ddb601a17c 100644
--- a/superset/db_engine_specs/base.py
+++ b/superset/db_engine_specs/base.py
@@ -191,10 +191,10 @@ class MetricType(TypedDict, total=False):
 
 class ValidColumnsType(TypedDict):
     """
-    Type for valid columns returned by `get_valid_columns`.
+    Type for valid columns returned by `get_valid_metrics_and_dimensions`.
     """
 
-    columns: set[str]
+    dimensions: set[str]
     metrics: set[str]
 
 
@@ -1519,11 +1519,11 @@ class BaseEngineSpec:  # pylint: disable=too-many-public-methods
         ]
 
     @classmethod
-    def get_valid_columns(
+    def get_valid_metrics_and_dimensions(
         cls,
         database: Database,
         datasource: ExploreMixin,
-        columns: set[str],
+        dimensions: set[str],
         metrics: set[str],
     ) -> ValidColumnsType:
         """
@@ -1539,7 +1539,7 @@ class BaseEngineSpec:  # pylint: disable=too-many-public-methods
         metrics, and simply returns everything, for reference.
         """
         return {
-            "columns": {column.column_name for column in datasource.columns},
+            "dimensions": {column.column_name for column in 
datasource.columns},
             "metrics": {metric.metric_name for metric in datasource.metrics},
         }
 
diff --git a/superset/db_engine_specs/metricflow.py b/superset/db_engine_specs/metricflow.py
index 77ea1552bf..7e3a51d125 100644
--- a/superset/db_engine_specs/metricflow.py
+++ b/superset/db_engine_specs/metricflow.py
@@ -142,15 +142,15 @@ class DbtMetricFlowEngineSpec(ShillelaghEngineSpec):
         ]
 
     @classmethod
-    def get_valid_columns(
+    def get_valid_metrics_and_dimensions(
         cls,
         database: Database,
         datasource: ExploreMixin,
-        columns: set[str],
+        dimensions: set[str],
         metrics: set[str],
     ) -> ValidColumnsType:
         """
-        Get valid columns.
+        Get valid metrics and dimensions.
 
         Given a datasource, and sets of selected metrics and dimensions, return the
         sets of valid metrics and dimensions that can further be selected.
@@ -160,6 +160,6 @@ class DbtMetricFlowEngineSpec(ShillelaghEngineSpec):
             adapter = get_adapter_for_table_name(connection, TABLE_NAME)
 
         return {
-            "metrics": adapter._get_metrics_for_dimensions(columns),
+            "metrics": adapter._get_metrics_for_dimensions(dimensions),
             "dimensions": adapter._get_dimensions_for_metrics(metrics),
         }

Reply via email to