This is an automated email from the ASF dual-hosted git repository.
eladkal pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new ce03fd67288 Remove deprecated classes scheduled for March 2026 (#64098)
ce03fd67288 is described below
commit ce03fd67288afb49ba4ea641d6938e3e225145be
Author: Nitochkin <[email protected]>
AuthorDate: Mon Mar 23 19:02:52 2026 +0100
Remove deprecated classes scheduled for March 2026 (#64098)
Co-authored-by: Anton Nitochkin <[email protected]>
---
providers/google/docs/changelog.rst | 1 +
.../google/docs/operators/cloud/vertex_ai.rst | 22 +--
.../google/cloud/hooks/vertex_ai/auto_ml.py | 181 ---------------------
.../google/cloud/operators/vertex_ai/auto_ml.py | 81 ---------
.../cloud/vertex_ai/example_vertex_ai_dataset.py | 24 +--
.../unit/google/cloud/operators/test_vertex_ai.py | 89 ----------
6 files changed, 5 insertions(+), 393 deletions(-)
diff --git a/providers/google/docs/changelog.rst
b/providers/google/docs/changelog.rst
index 955662024c7..d3874a75be0 100644
--- a/providers/google/docs/changelog.rst
+++ b/providers/google/docs/changelog.rst
@@ -70,6 +70,7 @@ Changelog
* Remove
``airflow.providers.google.cloud.operators.vertex_ai.generative_model.CreateCachedContentOperator``
use
``airflow.providers.google.cloud.operators.gen_ai.generative_model.GenAICreateCachedContentOperator``
instead
* Remove
``airflow.providers.google.cloud.operators.vertex_ai.generative_model.GenerateFromCachedContentOperator``
use
``airflow.providers.google.cloud.operators.gen_ai.generative_model.GenAIGenerateContentOperator``
instead
* Remove
``airflow.providers.google.cloud.operators.vertex_ai.generative_model.DeleteExperimentRunOperator``
use
``airflow.providers.google.cloud.operators.vertex_ai.experiment_service.DeleteExperimentRunOperator``
instead
+ * Remove
``airflow.providers.google.cloud.operators.vertex_ai.auto_ml.CreateAutoMLVideoTrainingJobOperator``
use
``airflow.providers.google.cloud.operators.gen_ai.generative_model.GenAISupervisedFineTuningTrainOperator``
instead
* Hooks
diff --git a/providers/google/docs/operators/cloud/vertex_ai.rst
b/providers/google/docs/operators/cloud/vertex_ai.rst
index dd840ec6750..246788c9e93 100644
--- a/providers/google/docs/operators/cloud/vertex_ai.rst
+++ b/providers/google/docs/operators/cloud/vertex_ai.rst
@@ -20,7 +20,7 @@ Google Cloud VertexAI Operators
The `Google Cloud VertexAI <https://cloud.google.com/vertex-ai/docs>`__
brings AutoML and AI Platform together into a unified API, client library, and
user
-interface. AutoML lets you train models on image, tabular, text, and video
datasets
+interface. AutoML lets you train models on image, tabular, and text datasets
without writing code, while training in AI Platform lets you run custom
training code.
With Vertex AI, both AutoML training and custom training are available options.
Whichever option you choose for training, you can save models, deploy models,
and
@@ -212,12 +212,11 @@ If you wish to delete a Custom Training Job you can use
Creating an AutoML Training Jobs
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-To create a Google Vertex AI Auto ML training jobs you have five operators
+To create Google Vertex AI Auto ML training jobs you have four operators
:class:`~airflow.providers.google.cloud.operators.vertex_ai.auto_ml.CreateAutoMLForecastingTrainingJobOperator`
:class:`~airflow.providers.google.cloud.operators.vertex_ai.auto_ml.CreateAutoMLImageTrainingJobOperator`
:class:`~airflow.providers.google.cloud.operators.vertex_ai.auto_ml.CreateAutoMLTabularTrainingJobOperator`
:class:`~airflow.providers.google.cloud.operators.vertex_ai.generative_model.SupervisedFineTuningTrainOperator`
-:class:`~airflow.providers.google.cloud.operators.vertex_ai.auto_ml.CreateAutoMLVideoTrainingJobOperator`
Each of them will wait for the operation to complete. The results of each
operator will be a model
which was trained by user using these operators.
@@ -265,23 +264,6 @@ put dataset id to ``dataset_id`` parameter in operator.
:start-after: [START
how_to_cloud_vertex_ai_create_auto_ml_tabular_training_job_operator]
:end-before: [END
how_to_cloud_vertex_ai_create_auto_ml_tabular_training_job_operator]
-.. warning::
- This operator is deprecated and will be removed after March 24, 2026.
Please use
-
:class:`~airflow.providers.google.cloud.operators.vertex_ai.generative_model.SupervisedFineTuningTrainOperator`.
-
-How to run AutoML Video Training Job
-:class:`~airflow.providers.google.cloud.operators.vertex_ai.auto_ml.CreateAutoMLVideoTrainingJobOperator`
-
-Before start running this Job you must prepare and create ``Video`` dataset.
After that you should
-put dataset id to ``dataset_id`` parameter in operator.
-
-Additionally, you can create new version of existing AutoML Video Training
Job. In this case, the result will be new
-version of existing Model instead of new Model created in Model Registry. This
can be done by specifying
-``parent_model`` parameter when running AutoML Video Training Job.
-
-Also you can use vertex_ai AutoML model for video tracking.
-
-
You can get a list of AutoML Training Jobs using
:class:`~airflow.providers.google.cloud.operators.vertex_ai.auto_ml.ListAutoMLTrainingJobOperator`.
diff --git
a/providers/google/src/airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py
b/providers/google/src/airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py
index 7a64fffc792..9ecbbeaf194 100644
---
a/providers/google/src/airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py
+++
b/providers/google/src/airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py
@@ -185,30 +185,6 @@ class AutoMLHook(GoogleBaseHook, OperationHelper):
model_encryption_spec_key_name=model_encryption_spec_key_name,
)
- def get_auto_ml_video_training_job(
- self,
- display_name: str,
- prediction_type: str = "classification",
- model_type: str = "CLOUD",
- project: str | None = None,
- location: str | None = None,
- labels: dict[str, str] | None = None,
- training_encryption_spec_key_name: str | None = None,
- model_encryption_spec_key_name: str | None = None,
- ) -> AutoMLVideoTrainingJob:
- """Return AutoMLVideoTrainingJob object."""
- return AutoMLVideoTrainingJob(
- display_name=display_name,
- prediction_type=prediction_type,
- model_type=model_type,
- project=project,
- location=location,
- credentials=self.get_credentials(),
- labels=labels,
-
training_encryption_spec_key_name=training_encryption_spec_key_name,
- model_encryption_spec_key_name=model_encryption_spec_key_name,
- )
-
@staticmethod
def extract_model_id(obj: dict) -> str:
"""Return unique id of the Model."""
@@ -951,163 +927,6 @@ class AutoMLHook(GoogleBaseHook, OperationHelper):
)
return model, training_id
- @GoogleBaseHook.fallback_to_default_project_id
- def create_auto_ml_video_training_job(
- self,
- project_id: str,
- region: str,
- display_name: str,
- dataset: datasets.VideoDataset,
- prediction_type: str = "classification",
- model_type: str = "CLOUD",
- labels: dict[str, str] | None = None,
- training_encryption_spec_key_name: str | None = None,
- model_encryption_spec_key_name: str | None = None,
- training_fraction_split: float | None = None,
- test_fraction_split: float | None = None,
- training_filter_split: str | None = None,
- test_filter_split: str | None = None,
- model_display_name: str | None = None,
- model_labels: dict[str, str] | None = None,
- sync: bool = True,
- parent_model: str | None = None,
- is_default_version: bool | None = None,
- model_version_aliases: list[str] | None = None,
- model_version_description: str | None = None,
- ) -> tuple[models.Model | None, str]:
- """
- Create an AutoML Video Training Job.
-
- :param project_id: Required. Project to run training in.
- :param region: Required. Location to run training in.
- :param display_name: Required. The user-defined name of this
TrainingPipeline.
- :param dataset: Required. The dataset within the same Project from
which data will be used to train
- the Model. The Dataset must use schema compatible with Model being
trained, and what is
- compatible should be described in the used TrainingPipeline's
[training_task_definition]
-
[google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition].
For tabular
- Datasets, all their data is exported to training, to pick and
choose from.
- :param prediction_type: The type of prediction the Model is to
produce, one of:
- "classification" - A video classification model classifies shots
and segments in your videos
- according to your own defined labels.
- "object_tracking" - A video object tracking model detects and
tracks multiple objects in shots
- and segments. You can use these models to track objects in your
videos according to your own
- pre-defined, custom labels.
- "action_recognition" - A video action recognition model pinpoints
the location of actions with
- short temporal durations (~1 second).
- :param parent_model: Optional. The resource name or model ID of an
existing model.
- The new model uploaded by this job will be a version of
`parent_model`.
- Only set this field when training a new version of an existing
model.
- :param is_default_version: Optional. When set to True, the newly
uploaded model version will
- automatically have alias "default" included. Subsequent uses of
- the model produced by this job without a version specified will
- use this "default" version.
- When set to False, the "default" alias will not be moved.
- Actions targeting the model version produced by this job will need
- to specifically reference this version by ID or alias.
- New model uploads, i.e. version 1, will always be "default"
aliased.
- :param model_version_aliases: Optional. User provided version aliases
so that the model version
- uploaded by this job can be referenced via alias instead of
- auto-generated version ID. A default version alias will be created
- for the first version of the model.
- The format is [a-z][a-zA-Z0-9-]{0,126}[a-z0-9]
- :param model_version_description: Optional. The description of the
model version
- being uploaded by this job.
- :param model_type: Required. One of the following:
- "CLOUD" - available for "classification", "object_tracking" and
"action_recognition" A Model best
- tailored to be used within Google Cloud, and which cannot be
exported.
- "MOBILE_VERSATILE_1" - available for "classification",
"object_tracking" and "action_recognition"
- A model that, in addition to being available within Google Cloud,
can also be exported (see
- ModelService.ExportModel) as a TensorFlow or TensorFlow Lite model
and used on a mobile or edge
- device with afterwards.
- "MOBILE_CORAL_VERSATILE_1" - available only for "object_tracking"
A versatile model that is meant
- to be exported (see ModelService.ExportModel) and used on a Google
Coral device.
- "MOBILE_CORAL_LOW_LATENCY_1" - available only for
"object_tracking" A model that trades off
- quality for low latency, to be exported (see
ModelService.ExportModel) and used on a Google Coral
- device.
- "MOBILE_JETSON_VERSATILE_1" - available only for "object_tracking"
A versatile model that is
- meant to be exported (see ModelService.ExportModel) and used on an
NVIDIA Jetson device.
- "MOBILE_JETSON_LOW_LATENCY_1" - available only for
"object_tracking" A model that trades off
- quality for low latency, to be exported (see
ModelService.ExportModel) and used on an NVIDIA
- Jetson device.
- :param labels: Optional. The labels with user-defined metadata to
organize TrainingPipelines. Label
- keys and values can be no longer than 64 characters (Unicode
codepoints), can only contain
- lowercase letters, numeric characters, underscores and dashes.
International characters are
- allowed. See https://goo.gl/xmQnxf for more information and
examples of labels.
- :param training_encryption_spec_key_name: Optional. The Cloud KMS
resource identifier of the customer
- managed encryption key used to protect the training pipeline. Has
the form:
-
``projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key``.
- The key needs to be in the same region as where the compute
resource is created.
- If set, this TrainingPipeline will be secured by this key.
- Note: Model trained by this TrainingPipeline is also secured by
this key if ``model_to_upload``
- is not set separately.
- :param model_encryption_spec_key_name: Optional. The Cloud KMS
resource identifier of the customer
- managed encryption key used to protect the model. Has the form:
-
``projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key``.
- The key needs to be in the same region as where the compute
resource is created.
- If set, the trained Model will be secured by this key.
- :param training_fraction_split: Optional. The fraction of the input
data that is to be used to train
- the Model. This is ignored if Dataset is not provided.
- :param test_fraction_split: Optional. The fraction of the input data
that is to be used to evaluate
- the Model. This is ignored if Dataset is not provided.
- :param training_filter_split: Optional. A filter on DataItems of the
Dataset. DataItems that match
- this filter are used to train the Model. A filter with same syntax
as the one used in
- DatasetService.ListDataItems may be used. If a single DataItem is
matched by more than one of the
- FilterSplit filters, then it is assigned to the first set that
applies to it in the training,
- validation, test order. This is ignored if Dataset is not provided.
- :param test_filter_split: Optional. A filter on DataItems of the
Dataset. DataItems that match this
- filter are used to test the Model. A filter with same syntax as
the one used in
- DatasetService.ListDataItems may be used. If a single DataItem is
matched by more than one of the
- FilterSplit filters, then it is assigned to the first set that
applies to it in the training,
- validation, test order. This is ignored if Dataset is not provided.
- :param model_display_name: Optional. The display name of the managed
Vertex AI Model. The name can be
- up to 128 characters long and can be consist of any UTF-8
characters. If not provided upon
- creation, the job's display_name is used.
- :param model_labels: Optional. The labels with user-defined metadata
to organize your Models. Label
- keys and values can be no longer than 64 characters (Unicode
codepoints), can only contain
- lowercase letters, numeric characters, underscores and dashes.
International characters are
- allowed. See https://goo.gl/xmQnxf for more information and
examples of labels.
- :param sync: Whether to execute this method synchronously. If False,
this method will be executed in
- concurrent Future and any downstream object will be immediately
returned and synced when the
- Future has completed.
- """
- self._job = self.get_auto_ml_video_training_job(
- project=project_id,
- location=region,
- display_name=display_name,
- prediction_type=prediction_type,
- model_type=model_type,
- labels=labels,
-
training_encryption_spec_key_name=training_encryption_spec_key_name,
- model_encryption_spec_key_name=model_encryption_spec_key_name,
- )
-
- if not self._job:
- raise AirflowException("AutoMLVideoTrainingJob was not created")
-
- model = self._job.run(
- dataset=dataset,
- training_fraction_split=training_fraction_split,
- test_fraction_split=test_fraction_split,
- training_filter_split=training_filter_split,
- test_filter_split=test_filter_split,
- model_display_name=model_display_name,
- model_labels=model_labels,
- sync=sync,
- parent_model=parent_model,
- is_default_version=is_default_version,
- model_version_aliases=model_version_aliases,
- model_version_description=model_version_description,
- )
- training_id = self.extract_training_id(self._job.resource_name)
- if model:
- model.wait()
- else:
- self.log.warning(
- "Training did not produce a Managed Model returning None.
AutoML Video Training "
- "Pipeline is not configured to upload a Model."
- )
- return model, training_id
-
@GoogleBaseHook.fallback_to_default_project_id
def delete_training_pipeline(
self,
diff --git
a/providers/google/src/airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py
b/providers/google/src/airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py
index 7884bac0c00..6e764222ef7 100644
---
a/providers/google/src/airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py
+++
b/providers/google/src/airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py
@@ -29,7 +29,6 @@ from google.cloud.aiplatform import datasets
from google.cloud.aiplatform.models import Model
from google.cloud.aiplatform_v1.types.training_pipeline import TrainingPipeline
-from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.providers.google.cloud.hooks.vertex_ai.auto_ml import AutoMLHook
from airflow.providers.google.cloud.links.vertex_ai import (
VertexAIModelLink,
@@ -37,7 +36,6 @@ from airflow.providers.google.cloud.links.vertex_ai import (
VertexAITrainingPipelinesLink,
)
from airflow.providers.google.cloud.operators.cloud_base import
GoogleCloudBaseOperator
-from airflow.providers.google.common.deprecated import deprecated
if TYPE_CHECKING:
from google.api_core.retry import Retry
@@ -475,85 +473,6 @@ class
CreateAutoMLTabularTrainingJobOperator(AutoMLTrainingJobBaseOperator):
return result
-@deprecated(
- planned_removal_date="March 24, 2026",
-
use_instead="airflow.providers.google.cloud.operators.gen_ai.generative_model.GenAISupervisedFineTuningTrainOperator",
- category=AirflowProviderDeprecationWarning,
-)
-class CreateAutoMLVideoTrainingJobOperator(AutoMLTrainingJobBaseOperator):
- """Create Auto ML Video Training job."""
-
- template_fields = (
- "parent_model",
- "dataset_id",
- "region",
- "impersonation_chain",
- )
- operator_extra_links = (VertexAIModelLink(), VertexAITrainingLink())
-
- def __init__(
- self,
- *,
- dataset_id: str,
- prediction_type: str = "classification",
- model_type: str = "CLOUD",
- training_filter_split: str | None = None,
- test_filter_split: str | None = None,
- region: str,
- impersonation_chain: str | Sequence[str] | None = None,
- parent_model: str | None = None,
- **kwargs,
- ) -> None:
- super().__init__(
- region=region, impersonation_chain=impersonation_chain,
parent_model=parent_model, **kwargs
- )
- self.dataset_id = dataset_id
- self.prediction_type = prediction_type
- self.model_type = model_type
- self.training_filter_split = training_filter_split
- self.test_filter_split = test_filter_split
-
- def execute(self, context: Context):
- self.hook = AutoMLHook(
- gcp_conn_id=self.gcp_conn_id,
- impersonation_chain=self.impersonation_chain,
- )
- self.parent_model = self.parent_model.split("@")[0] if
self.parent_model else None
- model, training_id = self.hook.create_auto_ml_video_training_job(
- project_id=self.project_id,
- region=self.region,
- display_name=self.display_name,
- dataset=datasets.VideoDataset(dataset_name=self.dataset_id),
- prediction_type=self.prediction_type,
- model_type=self.model_type,
- labels=self.labels,
-
training_encryption_spec_key_name=self.training_encryption_spec_key_name,
- model_encryption_spec_key_name=self.model_encryption_spec_key_name,
- training_fraction_split=self.training_fraction_split,
- test_fraction_split=self.test_fraction_split,
- training_filter_split=self.training_filter_split,
- test_filter_split=self.test_filter_split,
- model_display_name=self.model_display_name,
- model_labels=self.model_labels,
- sync=self.sync,
- parent_model=self.parent_model,
- is_default_version=self.is_default_version,
- model_version_aliases=self.model_version_aliases,
- model_version_description=self.model_version_description,
- )
-
- if model:
- result = Model.to_dict(model)
- model_id = self.hook.extract_model_id(result)
- context["ti"].xcom_push(key="model_id", value=model_id)
- VertexAIModelLink.persist(context=context, model_id=model_id)
- else:
- result = model # type: ignore
- context["ti"].xcom_push(key="training_id", value=training_id)
- VertexAITrainingLink.persist(context=context, training_id=training_id)
- return result
-
-
class DeleteAutoMLTrainingJobOperator(GoogleCloudBaseOperator):
"""
Delete an AutoML training job.
diff --git
a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py
b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py
index fb39901bfd3..9d2658a3b93 100644
---
a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py
+++
b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py
@@ -90,11 +90,6 @@ TABULAR_DATASET = {
Value(),
),
}
-VIDEO_DATASET = {
- "display_name": f"video-dataset-{ENV_ID}",
- "metadata_schema_uri": schema.dataset.metadata.video,
- "metadata": Value(string_value="video-dataset"),
-}
TEST_EXPORT_CONFIG = {"gcs_destination": {"output_uri_prefix":
f"gs://{DATA_SAMPLE_GCS_BUCKET_NAME}/exports"}}
TEST_IMPORT_CONFIG = [
{
@@ -136,12 +131,6 @@ with DAG(
region=REGION,
project_id=PROJECT_ID,
)
- create_video_dataset_job = CreateDatasetOperator(
- task_id="video_dataset",
- dataset=VIDEO_DATASET,
- region=REGION,
- project_id=PROJECT_ID,
- )
create_time_series_dataset_job = CreateDatasetOperator(
task_id="time_series_dataset",
dataset=TIME_SERIES_DATASET,
@@ -192,7 +181,7 @@ with DAG(
task_id="update_dataset",
project_id=PROJECT_ID,
region=REGION,
- dataset_id=create_video_dataset_job.output["dataset_id"],
+ dataset_id=create_time_series_dataset_job.output["dataset_id"],
dataset=DATASET_TO_UPDATE,
update_mask=TEST_UPDATE_MASK,
)
@@ -224,14 +213,6 @@ with DAG(
trigger_rule=TriggerRule.ALL_DONE,
)
- delete_video_dataset_job = DeleteDatasetOperator(
- task_id="delete_video_dataset",
- dataset_id=create_video_dataset_job.output["dataset_id"],
- region=REGION,
- project_id=PROJECT_ID,
- trigger_rule=TriggerRule.ALL_DONE,
- )
-
delete_bucket = GCSDeleteBucketOperator(
task_id="delete_bucket",
bucket_name=DATA_SAMPLE_GCS_BUCKET_NAME,
@@ -243,10 +224,9 @@ with DAG(
create_bucket
# TEST BODY
>> [
- create_time_series_dataset_job >> delete_dataset_job,
+ create_time_series_dataset_job >> update_dataset_job >>
delete_dataset_job,
create_tabular_dataset_job >> get_dataset >>
delete_tabular_dataset_job,
create_image_dataset_job >> import_data_job >> export_data_job >>
delete_image_dataset_job,
- create_video_dataset_job >> update_dataset_job >>
delete_video_dataset_job,
list_dataset_job,
]
# TEST TEARDOWN
diff --git
a/providers/google/tests/unit/google/cloud/operators/test_vertex_ai.py
b/providers/google/tests/unit/google/cloud/operators/test_vertex_ai.py
index 59d9bdaa468..762648f6192 100644
--- a/providers/google/tests/unit/google/cloud/operators/test_vertex_ai.py
+++ b/providers/google/tests/unit/google/cloud/operators/test_vertex_ai.py
@@ -35,7 +35,6 @@ from
airflow.providers.google.cloud.operators.vertex_ai.auto_ml import (
CreateAutoMLForecastingTrainingJobOperator,
CreateAutoMLImageTrainingJobOperator,
CreateAutoMLTabularTrainingJobOperator,
- CreateAutoMLVideoTrainingJobOperator,
DeleteAutoMLTrainingJobOperator,
ListAutoMLTrainingJobOperator,
)
@@ -1862,94 +1861,6 @@ class TestVertexAICreateAutoMLTabularTrainingJobOperator:
)
-class TestVertexAICreateAutoMLVideoTrainingJobOperator:
- @mock.patch("google.cloud.aiplatform.datasets.VideoDataset")
- @mock.patch(VERTEX_AI_PATH.format("auto_ml.AutoMLHook"))
- def test_execute(self, mock_hook, mock_dataset):
- mock_hook.return_value.create_auto_ml_video_training_job.return_value
= (None, "training_id")
- with pytest.warns(AirflowProviderDeprecationWarning):
- op = CreateAutoMLVideoTrainingJobOperator(
- task_id=TASK_ID,
- gcp_conn_id=GCP_CONN_ID,
- impersonation_chain=IMPERSONATION_CHAIN,
- display_name=DISPLAY_NAME,
- dataset_id=TEST_DATASET_ID,
- prediction_type="classification",
- model_type="CLOUD",
- sync=True,
- region=GCP_LOCATION,
- project_id=GCP_PROJECT,
- parent_model=TEST_PARENT_MODEL,
- )
- op.execute(context={"ti": mock.MagicMock(), "task": mock.MagicMock()})
- mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID,
impersonation_chain=IMPERSONATION_CHAIN)
- mock_dataset.assert_called_once_with(dataset_name=TEST_DATASET_ID)
-
mock_hook.return_value.create_auto_ml_video_training_job.assert_called_once_with(
- project_id=GCP_PROJECT,
- region=GCP_LOCATION,
- display_name=DISPLAY_NAME,
- dataset=mock_dataset.return_value,
- parent_model=TEST_PARENT_MODEL,
- prediction_type="classification",
- model_type="CLOUD",
- labels=None,
- training_encryption_spec_key_name=None,
- model_encryption_spec_key_name=None,
- training_fraction_split=None,
- test_fraction_split=None,
- training_filter_split=None,
- test_filter_split=None,
- model_display_name=None,
- model_labels=None,
- sync=True,
- is_default_version=None,
- model_version_aliases=None,
- model_version_description=None,
- )
-
- @mock.patch("google.cloud.aiplatform.datasets.VideoDataset")
- @mock.patch(VERTEX_AI_PATH.format("auto_ml.AutoMLHook"))
- def test_execute__parent_model_version_index_is_removed(self, mock_hook,
mock_dataset):
- mock_hook.return_value.create_auto_ml_video_training_job.return_value
= (None, "training_id")
- with pytest.warns(AirflowProviderDeprecationWarning):
- op = CreateAutoMLVideoTrainingJobOperator(
- task_id=TASK_ID,
- gcp_conn_id=GCP_CONN_ID,
- impersonation_chain=IMPERSONATION_CHAIN,
- display_name=DISPLAY_NAME,
- dataset_id=TEST_DATASET_ID,
- prediction_type="classification",
- model_type="CLOUD",
- sync=True,
- region=GCP_LOCATION,
- project_id=GCP_PROJECT,
- parent_model=VERSIONED_TEST_PARENT_MODEL,
- )
- op.execute(context={"ti": mock.MagicMock(), "task": mock.MagicMock()})
-
mock_hook.return_value.create_auto_ml_video_training_job.assert_called_once_with(
- project_id=GCP_PROJECT,
- region=GCP_LOCATION,
- display_name=DISPLAY_NAME,
- dataset=mock_dataset.return_value,
- parent_model=TEST_PARENT_MODEL,
- prediction_type="classification",
- model_type="CLOUD",
- labels=None,
- training_encryption_spec_key_name=None,
- model_encryption_spec_key_name=None,
- training_fraction_split=None,
- test_fraction_split=None,
- training_filter_split=None,
- test_filter_split=None,
- model_display_name=None,
- model_labels=None,
- sync=True,
- is_default_version=None,
- model_version_aliases=None,
- model_version_description=None,
- )
-
-
class TestVertexAIDeleteAutoMLTrainingJobOperator:
@mock.patch(VERTEX_AI_PATH.format("auto_ml.AutoMLHook"))
def test_execute(self, mock_hook):