Changes from all commits · 16 commits
4 changes: 4 additions & 0 deletions google/cloud/aiplatform_v1/__init__.py
@@ -563,6 +563,8 @@
from .types.io import SharePointSources
from .types.io import SlackSource
from .types.io import TFRecordDestination
from .types.io import VertexMultimodalDatasetDestination
from .types.io import VertexMultimodalDatasetSource
from .types.job_service import CancelBatchPredictionJobRequest
from .types.job_service import CancelCustomJobRequest
from .types.job_service import CancelDataLabelingJobRequest
@@ -2198,6 +2200,8 @@ def _get_version(dependency_name):
"Value",
"VertexAISearch",
"VertexAiSearchConfig",
"VertexMultimodalDatasetDestination",
"VertexMultimodalDatasetSource",
"VertexRagDataServiceClient",
"VertexRagServiceClient",
"VertexRagStore",
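For reviewers sanity-checking the new exports above: a minimal sketch, assuming an installed build of `google-cloud-aiplatform` that includes this PR; the dataset resource name is a placeholder.

```python
# Verify the two new I/O types are importable from the package root,
# as the __init__.py additions above intend.
from google.cloud.aiplatform_v1 import (
    VertexMultimodalDatasetDestination,
    VertexMultimodalDatasetSource,
)

# Placeholder resource name in the documented dataset format.
source = VertexMultimodalDatasetSource(
    dataset_name="projects/my-project/locations/us-central1/datasets/123"
)
print(source.dataset_name)
```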
google/cloud/aiplatform_v1/services/dataset_service/async_client.py
@@ -507,7 +507,6 @@ async def sample_get_dataset():
request (Optional[Union[google.cloud.aiplatform_v1.types.GetDatasetRequest, dict]]):
The request object. Request message for
[DatasetService.GetDataset][google.cloud.aiplatform.v1.DatasetService.GetDataset].
-                Next ID: 4
name (:class:`str`):
Required. The name of the Dataset
resource.
@@ -1704,7 +1703,6 @@ async def sample_get_dataset_version():
request (Optional[Union[google.cloud.aiplatform_v1.types.GetDatasetVersionRequest, dict]]):
The request object. Request message for
[DatasetService.GetDatasetVersion][google.cloud.aiplatform.v1.DatasetService.GetDatasetVersion].
-                Next ID: 4
name (:class:`str`):
Required. The resource name of the Dataset version to
delete. Format:
@@ -2643,10 +2641,7 @@ async def list_annotations(
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> pagers.ListAnnotationsAsyncPager:
r"""Lists Annotations belongs to a dataitem
This RPC is only available in InternalDatasetService. It
is only used for exporting conversation data to CCAI
Insights.
r"""Lists Annotations belongs to a dataitem.

.. code-block:: python

google/cloud/aiplatform_v1/services/dataset_service/client.py
@@ -1091,7 +1091,6 @@ def sample_get_dataset():
request (Union[google.cloud.aiplatform_v1.types.GetDatasetRequest, dict]):
The request object. Request message for
[DatasetService.GetDataset][google.cloud.aiplatform.v1.DatasetService.GetDataset].
-                Next ID: 4
name (str):
Required. The name of the Dataset
resource.
@@ -2261,7 +2260,6 @@ def sample_get_dataset_version():
request (Union[google.cloud.aiplatform_v1.types.GetDatasetVersionRequest, dict]):
The request object. Request message for
[DatasetService.GetDatasetVersion][google.cloud.aiplatform.v1.DatasetService.GetDatasetVersion].
-                Next ID: 4
name (str):
Required. The resource name of the Dataset version to
delete. Format:
@@ -3177,10 +3175,7 @@ def list_annotations(
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> pagers.ListAnnotationsPager:
r"""Lists Annotations belongs to a dataitem
This RPC is only available in InternalDatasetService. It
is only used for exporting conversation data to CCAI
Insights.
r"""Lists Annotations belongs to a dataitem.

.. code-block:: python

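As context for the trimmed `list_annotations` docstring, a short sketch of driving the method through the sync client; the parent path is a placeholder, and the sample elided from this diff may differ.

```python
from google.cloud import aiplatform_v1


def sample_list_annotations() -> None:
    client = aiplatform_v1.DatasetServiceClient()
    # Placeholder IDs: annotations are listed under a DataItem.
    parent = (
        "projects/my-project/locations/us-central1"
        "/datasets/123/dataItems/456"
    )
    request = aiplatform_v1.ListAnnotationsRequest(parent=parent)
    # The pager fetches additional pages transparently during iteration.
    for annotation in client.list_annotations(request=request):
        print(annotation.name)
```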
google/cloud/aiplatform_v1/services/dataset_service/transports/grpc.py
@@ -853,10 +853,7 @@ def list_annotations(
]:
r"""Return a callable for the list annotations method over gRPC.

-        Lists Annotations belongs to a dataitem
-        This RPC is only available in InternalDatasetService. It
-        is only used for exporting conversation data to CCAI
-        Insights.
+        Lists Annotations belongs to a dataitem.

Returns:
Callable[[~.ListAnnotationsRequest],
google/cloud/aiplatform_v1/services/dataset_service/transports/grpc_asyncio.py
@@ -880,10 +880,7 @@ def list_annotations(
]:
r"""Return a callable for the list annotations method over gRPC.

-        Lists Annotations belongs to a dataitem
-        This RPC is only available in InternalDatasetService. It
-        is only used for exporting conversation data to CCAI
-        Insights.
+        Lists Annotations belongs to a dataitem.

Returns:
Callable[[~.ListAnnotationsRequest],
google/cloud/aiplatform_v1/services/dataset_service/transports/rest.py
@@ -4569,7 +4569,6 @@ def __call__(
request (~.dataset_service.GetDatasetRequest):
The request object. Request message for
[DatasetService.GetDataset][google.cloud.aiplatform.v1.DatasetService.GetDataset].
-                Next ID: 4
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -4720,7 +4719,6 @@ def __call__(
request (~.dataset_service.GetDatasetVersionRequest):
The request object. Request message for
[DatasetService.GetDatasetVersion][google.cloud.aiplatform.v1.DatasetService.GetDatasetVersion].
-                Next ID: 4
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
google/cloud/aiplatform_v1/services/dataset_service/transports/rest_asyncio.py
@@ -2839,7 +2839,6 @@ async def __call__(
request (~.dataset_service.GetDatasetRequest):
The request object. Request message for
[DatasetService.GetDataset][google.cloud.aiplatform.v1.DatasetService.GetDataset].
-                Next ID: 4
retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -2999,7 +2998,6 @@ async def __call__(
request (~.dataset_service.GetDatasetVersionRequest):
The request object. Request message for
[DatasetService.GetDatasetVersion][google.cloud.aiplatform.v1.DatasetService.GetDatasetVersion].
-                Next ID: 4
retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
18 changes: 9 additions & 9 deletions google/cloud/aiplatform_v1/services/migration_service/client.py
@@ -271,40 +271,40 @@ def parse_annotated_dataset_path(path: str) -> Dict[str, str]:
@staticmethod
def dataset_path(
project: str,
+        location: str,
dataset: str,
) -> str:
"""Returns a fully-qualified dataset string."""
return "projects/{project}/datasets/{dataset}".format(
return "projects/{project}/locations/{location}/datasets/{dataset}".format(
project=project,
+            location=location,
dataset=dataset,
)

@staticmethod
def parse_dataset_path(path: str) -> Dict[str, str]:
"""Parses a dataset path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
path,
)
return m.groupdict() if m else {}

@staticmethod
def dataset_path(
project: str,
-        location: str,
dataset: str,
) -> str:
"""Returns a fully-qualified dataset string."""
return "projects/{project}/locations/{location}/datasets/{dataset}".format(
return "projects/{project}/datasets/{dataset}".format(
project=project,
-            location=location,
dataset=dataset,
)

@staticmethod
def parse_dataset_path(path: str) -> Dict[str, str]:
"""Parses a dataset path into its component segments."""
-        m = re.match(
-            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
-            path,
-        )
+        m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
return m.groupdict() if m else {}

@staticmethod
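Worth flagging for review: because Python keeps the last definition when a class body defines the same name twice, this reordering silently changes which variant `MigrationServiceClient.dataset_path` resolves to. A sketch of the net effect, assuming the hunk above is the whole change:

```python
from google.cloud import aiplatform_v1

# With duplicate definitions, the later one wins, so after this change
# the effective dataset_path is the location-less variant.
path = aiplatform_v1.MigrationServiceClient.dataset_path(
    project="my-project", dataset="my-dataset"
)
print(path)  # projects/my-project/datasets/my-dataset

parsed = aiplatform_v1.MigrationServiceClient.parse_dataset_path(path)
print(parsed)  # {'project': 'my-project', 'dataset': 'my-dataset'}
```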
4 changes: 4 additions & 0 deletions google/cloud/aiplatform_v1/types/__init__.py
@@ -566,6 +566,8 @@
SharePointSources,
SlackSource,
TFRecordDestination,
VertexMultimodalDatasetDestination,
VertexMultimodalDatasetSource,
)
from .job_service import (
CancelBatchPredictionJobRequest,
Expand Down Expand Up @@ -1665,6 +1667,8 @@
"SharePointSources",
"SlackSource",
"TFRecordDestination",
"VertexMultimodalDatasetDestination",
"VertexMultimodalDatasetSource",
"CancelBatchPredictionJobRequest",
"CancelCustomJobRequest",
"CancelDataLabelingJobRequest",
38 changes: 38 additions & 0 deletions google/cloud/aiplatform_v1/types/batch_prediction_job.py
@@ -273,6 +273,11 @@ class InputConfig(proto.Message):
additional columns that are not described by the
schema, and they will be ignored.

This field is a member of `oneof`_ ``source``.
vertex_multimodal_dataset_source (google.cloud.aiplatform_v1.types.VertexMultimodalDatasetSource):
A Vertex Managed Dataset. Currently, only
datasets of type Multimodal are supported.

This field is a member of `oneof`_ ``source``.
instances_format (str):
Required. The format in which instances are given, must be
@@ -293,6 +298,14 @@
oneof="source",
message=io.BigQuerySource,
)
vertex_multimodal_dataset_source: io.VertexMultimodalDatasetSource = (
proto.Field(
proto.MESSAGE,
number=4,
oneof="source",
message=io.VertexMultimodalDatasetSource,
)
)
instances_format: str = proto.Field(
proto.STRING,
number=1,
@@ -489,6 +502,11 @@ class OutputConfig(proto.Message):
[google.rpc.Status][google.rpc.Status] represented as a
STRUCT, and containing only ``code`` and ``message``.

This field is a member of `oneof`_ ``destination``.
vertex_multimodal_dataset_destination (google.cloud.aiplatform_v1.types.VertexMultimodalDatasetDestination):
The details for a Vertex Multimodal Dataset
that will be created for the output.

This field is a member of `oneof`_ ``destination``.
predictions_format (str):
Required. The format in which Vertex AI gives the
@@ -509,6 +527,14 @@
oneof="destination",
message=io.BigQueryDestination,
)
vertex_multimodal_dataset_destination: io.VertexMultimodalDatasetDestination = (
proto.Field(
proto.MESSAGE,
number=6,
oneof="destination",
message=io.VertexMultimodalDatasetDestination,
)
)
predictions_format: str = proto.Field(
proto.STRING,
number=1,
@@ -537,6 +563,13 @@ class OutputInfo(proto.Message):
``bq://projectId.bqDatasetId`` format, into which the
prediction output is written.

This field is a member of `oneof`_ ``output_location``.
vertex_multimodal_dataset_name (str):
Output only. The resource name of the Vertex Managed Dataset
created, into which the prediction output is written.
Format:
``projects/{project}/locations/{location}/datasets/{dataset}``

This field is a member of `oneof`_ ``output_location``.
bigquery_output_table (str):
Output only. The name of the BigQuery table created, in
@@ -555,6 +588,11 @@
number=2,
oneof="output_location",
)
vertex_multimodal_dataset_name: str = proto.Field(
proto.STRING,
number=5,
oneof="output_location",
)
bigquery_output_table: str = proto.Field(
proto.STRING,
number=4,
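To make the new `source`/`destination` oneof branches concrete, a hedged sketch of a batch prediction I/O config that reads from an existing multimodal dataset and writes to a new one; the resource names, formats, and display name below are placeholders, not values from this PR.

```python
from google.cloud import aiplatform_v1

# Read instances from an existing Vertex multimodal dataset.
input_config = aiplatform_v1.BatchPredictionJob.InputConfig(
    instances_format="jsonl",  # placeholder; valid formats depend on the model
    vertex_multimodal_dataset_source=aiplatform_v1.VertexMultimodalDatasetSource(
        dataset_name="projects/my-project/locations/us-central1/datasets/123",
    ),
)

# Write predictions into a newly created multimodal dataset.
output_config = aiplatform_v1.BatchPredictionJob.OutputConfig(
    predictions_format="jsonl",  # placeholder
    # Omitting bigquery_destination lets the service create the backing
    # table in a default BigQuery dataset, per the field docs above.
    vertex_multimodal_dataset_destination=aiplatform_v1.VertexMultimodalDatasetDestination(
        display_name="my-batch-output",
    ),
)
```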
5 changes: 3 additions & 2 deletions google/cloud/aiplatform_v1/types/dataset.py
@@ -43,8 +43,9 @@ class Dataset(proto.Message):

Attributes:
name (str):
-            Output only. Identifier. The resource name of
-            the Dataset.
+            Output only. Identifier. The resource name of the Dataset.
+            Format:
+            ``projects/{project}/locations/{location}/datasets/{dataset}``
display_name (str):
Required. The user-defined name of the
Dataset. The name can be up to 128 characters
2 changes: 0 additions & 2 deletions google/cloud/aiplatform_v1/types/dataset_service.py
@@ -112,7 +112,6 @@ class CreateDatasetOperationMetadata(proto.Message):
class GetDatasetRequest(proto.Message):
r"""Request message for
[DatasetService.GetDataset][google.cloud.aiplatform.v1.DatasetService.GetDataset].
-    Next ID: 4

Attributes:
name (str):
@@ -489,7 +488,6 @@ class DeleteDatasetVersionRequest(proto.Message):
class GetDatasetVersionRequest(proto.Message):
r"""Request message for
[DatasetService.GetDatasetVersion][google.cloud.aiplatform.v1.DatasetService.GetDatasetVersion].
-    Next ID: 4

Attributes:
name (str):
5 changes: 3 additions & 2 deletions google/cloud/aiplatform_v1/types/dataset_version.py
@@ -36,8 +36,9 @@ class DatasetVersion(proto.Message):

Attributes:
name (str):
-            Output only. Identifier. The resource name of
-            the DatasetVersion.
+            Output only. Identifier. The resource name of the
+            DatasetVersion. Format:
+            ``projects/{project}/locations/{location}/datasets/{dataset}/datasetVersions/{dataset_version}``
create_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. Timestamp when this
DatasetVersion was created.
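The format documented above should line up with the generated path helper on `DatasetServiceClient`; a quick sketch with placeholder IDs, assuming the standard generated `dataset_version_path` helper:

```python
from google.cloud import aiplatform_v1

# Placeholder IDs; output mirrors the format documented above.
name = aiplatform_v1.DatasetServiceClient.dataset_version_path(
    project="my-project",
    location="us-central1",
    dataset="123",
    dataset_version="456",
)
print(name)
# projects/my-project/locations/us-central1/datasets/123/datasetVersions/456
```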
42 changes: 42 additions & 0 deletions google/cloud/aiplatform_v1/types/io.py
@@ -32,6 +32,8 @@
"GcsDestination",
"BigQuerySource",
"BigQueryDestination",
"VertexMultimodalDatasetSource",
"VertexMultimodalDatasetDestination",
"CsvDestination",
"TFRecordDestination",
"ContainerRegistryDestination",
@@ -152,6 +154,46 @@ class BigQueryDestination(proto.Message):
)


class VertexMultimodalDatasetSource(proto.Message):
r"""The Vertex Multimodal Dataset for the input content.

Attributes:
dataset_name (str):
Required. The resource name of the Vertex Dataset. Format:
``projects/{project}/locations/{location}/datasets/{dataset}``
"""

dataset_name: str = proto.Field(
proto.STRING,
number=1,
)


class VertexMultimodalDatasetDestination(proto.Message):
r"""The details for a Vertex Multimodal Dataset output.

Attributes:
bigquery_destination (google.cloud.aiplatform_v1.types.BigQueryDestination):
Optional. The destination of the underlying
BigQuery table that will be created for the
output Multimodal Dataset. If not specified, the
BigQuery table will be created in a default
BigQuery dataset.
display_name (str):
Optional. Display name of the output dataset.
"""

bigquery_destination: "BigQueryDestination" = proto.Field(
proto.MESSAGE,
number=1,
message="BigQueryDestination",
)
display_name: str = proto.Field(
proto.STRING,
number=2,
)


class CsvDestination(proto.Message):
r"""The storage details for CSV output content.

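For completeness, a sketch of the new destination message with an explicit BigQuery table location rather than the default dataset mentioned in the field docs; the `bq://` URI and display name are placeholders.

```python
from google.cloud import aiplatform_v1

# Placeholder project/dataset in the BigQuery URI.
destination = aiplatform_v1.VertexMultimodalDatasetDestination(
    bigquery_destination=aiplatform_v1.BigQueryDestination(
        output_uri="bq://my-project.my_bq_dataset",
    ),
    display_name="my-output-dataset",
)
print(destination)
```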
4 changes: 4 additions & 0 deletions google/cloud/aiplatform_v1beta1/__init__.py
@@ -697,6 +697,8 @@
from .types.io import SharePointSources
from .types.io import SlackSource
from .types.io import TFRecordDestination
from .types.io import VertexMultimodalDatasetDestination
from .types.io import VertexMultimodalDatasetSource
from .types.job_service import CancelBatchPredictionJobRequest
from .types.job_service import CancelCustomJobRequest
from .types.job_service import CancelDataLabelingJobRequest
@@ -2664,6 +2666,8 @@ def _get_version(dependency_name):
"VeoTuningSpec",
"VertexAISearch",
"VertexAiSearchConfig",
"VertexMultimodalDatasetDestination",
"VertexMultimodalDatasetSource",
"VertexRagDataServiceClient",
"VertexRagServiceClient",
"VertexRagStore",