pierrejeambrun commented on code in PR #45939:
URL: https://github.com/apache/airflow/pull/45939#discussion_r1927014283


##########
airflow/api_fastapi/core_api/services/public/pools.py:
##########
@@ -0,0 +1,144 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+from fastapi import HTTPException, status
+from pydantic import ValidationError
+from sqlalchemy import select
+
+from airflow.api_fastapi.core_api.datamodels.common import 
BulkActionNotOnExistence, BulkActionOnExistence
+from airflow.api_fastapi.core_api.datamodels.pools import (
+    PoolBulkActionResponse,
+    PoolBulkCreateAction,
+    PoolBulkDeleteAction,
+    PoolBulkUpdateAction,
+    PoolPatchBody,
+)
+from airflow.models.pool import Pool
+
+
+def categorize_pools(session, pool_names: set) -> tuple[dict, set, set]:
+    """
+    Categorize the given pool_names into matched_pool_names and 
not_found_pool_names based on existing pool_names.
+
+    Existed pools are returned as a dict of {pool_name : Pool}.
+
+    :param session: SQLAlchemy session
+    :param pool_names: set of pool_names
+    :return: tuple of dict of existed pools, set of matched pool_names, set of 
not found pool_names
+    """
+    existed_pools = 
session.execute(select(Pool).filter(Pool.pool.in_(pool_names))).scalars()
+    existed_pools_dict = {pool.pool: pool for pool in existed_pools}
+    matched_pool_names = set(existed_pools_dict.keys())
+    not_found_pool_names = pool_names - matched_pool_names
+    return existed_pools_dict, matched_pool_names, not_found_pool_names
+
+
+def handle_bulk_create(session, action: PoolBulkCreateAction, results: 
PoolBulkActionResponse) -> None:
+    """Bulk create pools."""
+    to_create_pool_names = {pool.pool for pool in action.pools}
+    existed_pools_dict, matched_pool_names, not_found_pool_names = 
categorize_pools(

Review Comment:
   ```suggestion
       existing_pools_dict, matched_pool_names, not_found_pool_names = 
categorize_pools(
   ```



##########
airflow/api_fastapi/core_api/services/public/pools.py:
##########
@@ -0,0 +1,144 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+from fastapi import HTTPException, status
+from pydantic import ValidationError
+from sqlalchemy import select
+
+from airflow.api_fastapi.core_api.datamodels.common import 
BulkActionNotOnExistence, BulkActionOnExistence
+from airflow.api_fastapi.core_api.datamodels.pools import (
+    PoolBulkActionResponse,
+    PoolBulkCreateAction,
+    PoolBulkDeleteAction,
+    PoolBulkUpdateAction,
+    PoolPatchBody,
+)
+from airflow.models.pool import Pool
+
+
+def categorize_pools(session, pool_names: set) -> tuple[dict, set, set]:
+    """
+    Categorize the given pool_names into matched_pool_names and 
not_found_pool_names based on existing pool_names.
+
+    Existed pools are returned as a dict of {pool_name : Pool}.

Review Comment:
   ```suggestion
       Existing pools are returned as a dict of {pool_name : Pool}.
   ```



##########
airflow/api_fastapi/core_api/services/public/pools.py:
##########
@@ -0,0 +1,144 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+from fastapi import HTTPException, status
+from pydantic import ValidationError
+from sqlalchemy import select
+
+from airflow.api_fastapi.core_api.datamodels.common import 
BulkActionNotOnExistence, BulkActionOnExistence
+from airflow.api_fastapi.core_api.datamodels.pools import (
+    PoolBulkActionResponse,
+    PoolBulkCreateAction,
+    PoolBulkDeleteAction,
+    PoolBulkUpdateAction,
+    PoolPatchBody,
+)
+from airflow.models.pool import Pool
+
+
+def categorize_pools(session, pool_names: set) -> tuple[dict, set, set]:

Review Comment:
   The `session` parameter is missing a type annotation.



##########
airflow/api_fastapi/core_api/datamodels/connections.py:
##########
@@ -97,20 +102,23 @@ class ConnectionBulkCreateAction(BulkBaseAction):
 
     action: BulkAction = BulkAction.CREATE
     connections: list[ConnectionBody] = Field(..., description="A list of 
connections to be created.")
+    action_on_existence: BulkActionOnExistence = BulkActionOnExistence.FAIL
 
 
 class ConnectionBulkUpdateAction(BulkBaseAction):
     """Bulk Update Connection serializer for request bodies."""
 
     action: BulkAction = BulkAction.UPDATE
     connections: list[ConnectionBody] = Field(..., description="A list of 
connections to be updated.")
+    action_not_on_existence: BulkActionNotOnExistence = 
BulkActionNotOnExistence.FAIL

Review Comment:
   Maybe:
   ```suggestion
       action_on_missing: BulkActionNotOnExistence = 
BulkActionNotOnExistence.FAIL
   ```
   Or `action_on_non_existence`?



##########
airflow/api_fastapi/core_api/services/public/pools.py:
##########
@@ -0,0 +1,144 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+from fastapi import HTTPException, status
+from pydantic import ValidationError
+from sqlalchemy import select
+
+from airflow.api_fastapi.core_api.datamodels.common import 
BulkActionNotOnExistence, BulkActionOnExistence
+from airflow.api_fastapi.core_api.datamodels.pools import (
+    PoolBulkActionResponse,
+    PoolBulkCreateAction,
+    PoolBulkDeleteAction,
+    PoolBulkUpdateAction,
+    PoolPatchBody,
+)
+from airflow.models.pool import Pool
+
+
+def categorize_pools(session, pool_names: set) -> tuple[dict, set, set]:
+    """
+    Categorize the given pool_names into matched_pool_names and 
not_found_pool_names based on existing pool_names.
+
+    Existed pools are returned as a dict of {pool_name : Pool}.
+
+    :param session: SQLAlchemy session
+    :param pool_names: set of pool_names
+    :return: tuple of dict of existed pools, set of matched pool_names, set of 
not found pool_names
+    """
+    existed_pools = 
session.execute(select(Pool).filter(Pool.pool.in_(pool_names))).scalars()
+    existed_pools_dict = {pool.pool: pool for pool in existed_pools}
+    matched_pool_names = set(existed_pools_dict.keys())
+    not_found_pool_names = pool_names - matched_pool_names
+    return existed_pools_dict, matched_pool_names, not_found_pool_names
+
+
+def handle_bulk_create(session, action: PoolBulkCreateAction, results: 
PoolBulkActionResponse) -> None:
+    """Bulk create pools."""
+    to_create_pool_names = {pool.pool for pool in action.pools}
+    existed_pools_dict, matched_pool_names, not_found_pool_names = 
categorize_pools(
+        session, to_create_pool_names
+    )
+    try:
+        if action.action_on_existence == BulkActionOnExistence.FAIL and 
matched_pool_names:
+            raise HTTPException(
+                status_code=status.HTTP_409_CONFLICT,
+                detail=f"The pools with these pool names: {matched_pool_names} 
already exist.",
+            )
+        elif action.action_on_existence == BulkActionOnExistence.SKIP:
+            create_pool_names = not_found_pool_names
+        else:
+            create_pool_names = to_create_pool_names
+
+        for pool in action.pools:
+            if pool.pool in create_pool_names:
+                if pool.pool in matched_pool_names:
+                    existed_pool = existed_pools_dict[pool.pool]
+                    for key, val in pool.model_dump().items():
+                        setattr(existed_pool, key, val)
+                else:
+                    session.add(Pool(**pool.model_dump()))
+                results.success.append(pool.pool)
+        session.commit()

Review Comment:
   I don't think we should commit here.
   
   The session dependency will commit at the end of the request, ensuring that 
only one commit is performed for the entire transaction / HTTP request. (It 
would fail as a whole or succeed as a whole.)
   
   As written, `handle_bulk_create` could write to the database while the rest 
of the request plainly fails for some other reason (500). The user wouldn't 
expect pools to be created in that case.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to