Reference documentation and code samples for the Vertex AI V1 API class Google::Cloud::AIPlatform::V1::Model.
A trained machine learning Model.
Inherits
- Object
Extended By
- Google::Protobuf::MessageExts::ClassMethods
Includes
- Google::Protobuf::MessageExts
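As a quick orientation, the following sketch constructs a Model message in Ruby using a few of the writable fields documented below; the field values are placeholders, and only display_name is required.

    require "google/cloud/ai_platform/v1"

    # Minimal sketch: build a Model message with placeholder values.
    # Only display_name is required; labels illustrate the map field.
    model = Google::Cloud::AIPlatform::V1::Model.new(
      display_name: "my-model",                               # up to 128 UTF-8 characters
      description:  "Example model used for illustration",
      labels:       { "team" => "research", "stage" => "dev" }
    )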
Methods
#artifact_uri
def artifact_uri() -> ::String
- (::String) — Immutable. The path to the directory containing the Model artifact and any of its supporting files. Not required for AutoML Models.
#artifact_uri=
def artifact_uri=(value) -> ::String
- value (::String) — Immutable. The path to the directory containing the Model artifact and any of its supporting files. Not required for AutoML Models.
- (::String) — Immutable. The path to the directory containing the Model artifact and any of its supporting files. Not required for AutoML Models.
#base_model_source
def base_model_source() -> ::Google::Cloud::AIPlatform::V1::Model::BaseModelSource
- (::Google::Cloud::AIPlatform::V1::Model::BaseModelSource) — Optional. User input field to specify the base model source. Currently it only supports specifying Model Garden models and Genie models.
#base_model_source=
def base_model_source=(value) -> ::Google::Cloud::AIPlatform::V1::Model::BaseModelSource
- value (::Google::Cloud::AIPlatform::V1::Model::BaseModelSource) — Optional. User input field to specify the base model source. Currently it only supports specifying Model Garden models and Genie models.
- (::Google::Cloud::AIPlatform::V1::Model::BaseModelSource) — Optional. User input field to specify the base model source. Currently it only supports specifying Model Garden models and Genie models.
#container_spec
def container_spec() -> ::Google::Cloud::AIPlatform::V1::ModelContainerSpec
- (::Google::Cloud::AIPlatform::V1::ModelContainerSpec) — Input only. The specification of the container that is to be used when deploying this Model. The specification is ingested upon ModelService.UploadModel, and all binaries it contains are copied and stored internally by Vertex AI. Not required for AutoML Models.
#container_spec=
def container_spec=(value) -> ::Google::Cloud::AIPlatform::V1::ModelContainerSpec
- value (::Google::Cloud::AIPlatform::V1::ModelContainerSpec) — Input only. The specification of the container that is to be used when deploying this Model. The specification is ingested upon ModelService.UploadModel, and all binaries it contains are copied and stored internally by Vertex AI. Not required for AutoML Models.
- (::Google::Cloud::AIPlatform::V1::ModelContainerSpec) — Input only. The specification of the container that is to be used when deploying this Model. The specification is ingested upon ModelService.UploadModel, and all binaries it contains are copied and stored internally by Vertex AI. Not required for AutoML Models.
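For custom-trained models, a minimal sketch of pairing #container_spec with #artifact_uri before upload might look like the following; the image URI, routes, and bucket path are placeholders, and the exact requirements depend on your serving container.

    # Hypothetical serving container and artifact location for a custom model.
    # The spec is ingested by Vertex AI when ModelService.UploadModel is called.
    model.container_spec = Google::Cloud::AIPlatform::V1::ModelContainerSpec.new(
      image_uri:     "us-docker.pkg.dev/my-project/my-repo/serving:latest",
      predict_route: "/predict",
      health_route:  "/health"
    )
    model.artifact_uri = "gs://my-bucket/model-artifacts/"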
#create_time
def create_time() -> ::Google::Protobuf::Timestamp
- (::Google::Protobuf::Timestamp) — Output only. Timestamp when this Model was uploaded into Vertex AI.
#data_stats
def data_stats() -> ::Google::Cloud::AIPlatform::V1::Model::DataStats
- (::Google::Cloud::AIPlatform::V1::Model::DataStats) — Stats of data used for training or evaluating the Model. Only populated when the Model is trained by a TrainingPipeline with data_input_config.
#data_stats=
def data_stats=(value) -> ::Google::Cloud::AIPlatform::V1::Model::DataStats
- value (::Google::Cloud::AIPlatform::V1::Model::DataStats) — Stats of data used for training or evaluating the Model. Only populated when the Model is trained by a TrainingPipeline with data_input_config.
- (::Google::Cloud::AIPlatform::V1::Model::DataStats) — Stats of data used for training or evaluating the Model. Only populated when the Model is trained by a TrainingPipeline with data_input_config.
#deployed_models
def deployed_models() -> ::Array<::Google::Cloud::AIPlatform::V1::DeployedModelRef>
- (::Array<::Google::Cloud::AIPlatform::V1::DeployedModelRef>) — Output only. The pointers to DeployedModels created from this Model. Note that the Model could have been deployed to Endpoints in different Locations.
#description
def description() -> ::String
- (::String) — The description of the Model.
#description=
def description=(value) -> ::String
- value (::String) — The description of the Model.
- (::String) — The description of the Model.
#display_name
def display_name() -> ::String
- (::String) — Required. The display name of the Model. The name can be up to 128 characters long and can consist of any UTF-8 characters.
#display_name=
def display_name=(value) -> ::String
- value (::String) — Required. The display name of the Model. The name can be up to 128 characters long and can consist of any UTF-8 characters.
- (::String) — Required. The display name of the Model. The name can be up to 128 characters long and can consist of any UTF-8 characters.
#encryption_spec
def encryption_spec() -> ::Google::Cloud::AIPlatform::V1::EncryptionSpec
- (::Google::Cloud::AIPlatform::V1::EncryptionSpec) — Customer-managed encryption key spec for a Model. If set, this Model and all sub-resources of this Model will be secured by this key.
#encryption_spec=
def encryption_spec=(value) -> ::Google::Cloud::AIPlatform::V1::EncryptionSpec
- value (::Google::Cloud::AIPlatform::V1::EncryptionSpec) — Customer-managed encryption key spec for a Model. If set, this Model and all sub-resources of this Model will be secured by this key.
- (::Google::Cloud::AIPlatform::V1::EncryptionSpec) — Customer-managed encryption key spec for a Model. If set, this Model and all sub-resources of this Model will be secured by this key.
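A sketch of attaching a customer-managed encryption key; the Cloud KMS key name below is a placeholder.

    # Hypothetical CMEK configuration; kms_key_name must reference an existing key.
    model.encryption_spec = Google::Cloud::AIPlatform::V1::EncryptionSpec.new(
      kms_key_name: "projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key"
    )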
#etag
def etag() -> ::String
- (::String) — Used to perform consistent read-modify-write updates. If not set, a blind "overwrite" update happens.
#etag=
def etag=(value) -> ::String
- value (::String) — Used to perform consistent read-modify-write updates. If not set, a blind "overwrite" update happens.
- (::String) — Used to perform consistent read-modify-write updates. If not set, a blind "overwrite" update happens.
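To illustrate the read-modify-write pattern, the sketch below fetches a Model, edits it, and sends the update while carrying the etag returned by the read; the resource name is a placeholder and the call shapes follow the V1 ModelService client surface.

    require "google/cloud/ai_platform/v1"

    client = Google::Cloud::AIPlatform::V1::ModelService::Client.new

    # Read: the returned Model carries an etag.
    model = client.get_model name: "projects/my-project/locations/us-central1/models/123"

    # Modify locally.
    model.description = "Updated description"

    # Write: because the etag is sent back unchanged, the server can reject the
    # update if the Model was modified in the meantime instead of overwriting it.
    client.update_model(
      model:       model,
      update_mask: Google::Protobuf::FieldMask.new(paths: ["description"])
    )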
#explanation_spec
def explanation_spec() -> ::Google::Cloud::AIPlatform::V1::ExplanationSpec
- (::Google::Cloud::AIPlatform::V1::ExplanationSpec) — The default explanation specification for this Model.
The Model can be used for [requesting explanation][google.cloud.aiplatform.v1.PredictionService.Explain] after being deployed if it is populated. The Model can be used for [batch explanation][google.cloud.aiplatform.v1.BatchPredictionJob.generate_explanation] if it is populated.
All fields of the explanation_spec can be overridden by explanation_spec of DeployModelRequest.deployed_model, or explanation_spec of BatchPredictionJob.
If the default explanation specification is not set for this Model, this Model can still be used for [requesting explanation][google.cloud.aiplatform.v1.PredictionService.Explain] by setting explanation_spec of DeployModelRequest.deployed_model and for [batch explanation][google.cloud.aiplatform.v1.BatchPredictionJob.generate_explanation] by setting explanation_spec of BatchPredictionJob.
#explanation_spec=
def explanation_spec=(value) -> ::Google::Cloud::AIPlatform::V1::ExplanationSpec
- value (::Google::Cloud::AIPlatform::V1::ExplanationSpec) — The default explanation specification for this Model.
The Model can be used for [requesting explanation][google.cloud.aiplatform.v1.PredictionService.Explain] after being deployed if it is populated. The Model can be used for [batch explanation][google.cloud.aiplatform.v1.BatchPredictionJob.generate_explanation] if it is populated.
All fields of the explanation_spec can be overridden by explanation_spec of DeployModelRequest.deployed_model, or explanation_spec of BatchPredictionJob.
If the default explanation specification is not set for this Model, this Model can still be used for [requesting explanation][google.cloud.aiplatform.v1.PredictionService.Explain] by setting explanation_spec of DeployModelRequest.deployed_model and for [batch explanation][google.cloud.aiplatform.v1.BatchPredictionJob.generate_explanation] by setting explanation_spec of BatchPredictionJob.
- (::Google::Cloud::AIPlatform::V1::ExplanationSpec) — The default explanation specification for this Model.
The Model can be used for [requesting explanation][google.cloud.aiplatform.v1.PredictionService.Explain] after being deployed if it is populated. The Model can be used for [batch explanation][google.cloud.aiplatform.v1.BatchPredictionJob.generate_explanation] if it is populated.
All fields of the explanation_spec can be overridden by explanation_spec of DeployModelRequest.deployed_model, or explanation_spec of BatchPredictionJob.
If the default explanation specification is not set for this Model, this Model can still be used for [requesting explanation][google.cloud.aiplatform.v1.PredictionService.Explain] by setting explanation_spec of DeployModelRequest.deployed_model and for [batch explanation][google.cloud.aiplatform.v1.BatchPredictionJob.generate_explanation] by setting explanation_spec of BatchPredictionJob.
#labels
def labels() -> ::Google::Protobuf::Map{::String => ::String}
- (::Google::Protobuf::Map{::String => ::String}) — The labels with user-defined metadata to organize your Models.
Label keys and values can be no longer than 64 characters (Unicode codepoints), can only contain lowercase letters, numeric characters, underscores and dashes. International characters are allowed.
See https://goo.gl/xmQnxf for more information and examples of labels.
#labels=
def labels=(value) -> ::Google::Protobuf::Map{::String => ::String}
- value (::Google::Protobuf::Map{::String => ::String}) — The labels with user-defined metadata to organize your Models.
Label keys and values can be no longer than 64 characters (Unicode codepoints), can only contain lowercase letters, numeric characters, underscores and dashes. International characters are allowed.
See https://goo.gl/xmQnxf for more information and examples of labels.
- (::Google::Protobuf::Map{::String => ::String}) — The labels with user-defined metadata to organize your Models.
Label keys and values can be no longer than 64 characters (Unicode codepoints), can only contain lowercase letters, numeric characters, underscores and dashes. International characters are allowed.
See https://goo.gl/xmQnxf for more information and examples of labels.
#metadata
def metadata() -> ::Google::Protobuf::Value
- (::Google::Protobuf::Value) — Immutable. Additional information about the Model; the schema of the metadata can be found in metadata_schema. Unset if the Model does not have any additional information.
#metadata=
def metadata=(value) -> ::Google::Protobuf::Value
- value (::Google::Protobuf::Value) — Immutable. Additional information about the Model; the schema of the metadata can be found in metadata_schema. Unset if the Model does not have any additional information.
- (::Google::Protobuf::Value) — Immutable. Additional information about the Model; the schema of the metadata can be found in metadata_schema. Unset if the Model does not have any additional information.
#metadata_artifact
def metadata_artifact() -> ::String
- (::String) — Output only. The resource name of the Artifact that was created in MetadataStore when creating the Model. The Artifact resource name pattern is projects/{project}/locations/{location}/metadataStores/{metadata_store}/artifacts/{artifact}.
#metadata_schema_uri
def metadata_schema_uri() -> ::String
- (::String) — Immutable. Points to a YAML file stored on Google Cloud Storage describing additional information about the Model that is specific to it. Unset if the Model does not have any additional information. The schema is defined as an OpenAPI 3.0.2 Schema Object. AutoML Models always have this field populated by Vertex AI; if no additional metadata is needed, this field is set to an empty string. Note: The URI given on output will be immutable and probably different, including the URI scheme, than the one given on input. The output URI will point to a location where the user only has read access.
#metadata_schema_uri=
def metadata_schema_uri=(value) -> ::String
- value (::String) — Immutable. Points to a YAML file stored on Google Cloud Storage describing additional information about the Model that is specific to it. Unset if the Model does not have any additional information. The schema is defined as an OpenAPI 3.0.2 Schema Object. AutoML Models always have this field populated by Vertex AI; if no additional metadata is needed, this field is set to an empty string. Note: The URI given on output will be immutable and probably different, including the URI scheme, than the one given on input. The output URI will point to a location where the user only has read access.
- (::String) — Immutable. Points to a YAML file stored on Google Cloud Storage describing additional information about the Model that is specific to it. Unset if the Model does not have any additional information. The schema is defined as an OpenAPI 3.0.2 Schema Object. AutoML Models always have this field populated by Vertex AI; if no additional metadata is needed, this field is set to an empty string. Note: The URI given on output will be immutable and probably different, including the URI scheme, than the one given on input. The output URI will point to a location where the user only has read access.
#model_source_info
def model_source_info() -> ::Google::Cloud::AIPlatform::V1::ModelSourceInfo
- (::Google::Cloud::AIPlatform::V1::ModelSourceInfo) — Output only. Source of a model. It can be an AutoML training pipeline, a custom training pipeline, BigQuery ML, or a model saved and tuned from Genie or Model Garden.
#name
def name() -> ::String
- (::String) — The resource name of the Model.
#name=
def name=(value) -> ::String
- value (::String) — The resource name of the Model.
- (::String) — The resource name of the Model.
#original_model_info
def original_model_info() -> ::Google::Cloud::AIPlatform::V1::Model::OriginalModelInfo
- (::Google::Cloud::AIPlatform::V1::Model::OriginalModelInfo) — Output only. If this Model is a copy of another Model, this contains info about the original.
#pipeline_job
def pipeline_job() -> ::String
- (::String) — Optional. This field is populated if the model is produced by a pipeline job.
#pipeline_job=
def pipeline_job=(value) -> ::String
- value (::String) — Optional. This field is populated if the model is produced by a pipeline job.
- (::String) — Optional. This field is populated if the model is produced by a pipeline job.
#predict_schemata
def predict_schemata() -> ::Google::Cloud::AIPlatform::V1::PredictSchemata
- (::Google::Cloud::AIPlatform::V1::PredictSchemata) — The schemata that describe formats of the Model's predictions and explanations as given and returned via PredictionService.Predict and PredictionService.Explain.
#predict_schemata=
def predict_schemata=(value) -> ::Google::Cloud::AIPlatform::V1::PredictSchemata
- value (::Google::Cloud::AIPlatform::V1::PredictSchemata) — The schemata that describe formats of the Model's predictions and explanations as given and returned via PredictionService.Predict and PredictionService.Explain.
- (::Google::Cloud::AIPlatform::V1::PredictSchemata) — The schemata that describe formats of the Model's predictions and explanations as given and returned via PredictionService.Predict and PredictionService.Explain.
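A sketch of filling in PredictSchemata with hypothetical Cloud Storage schema files; the URIs below are placeholders.

    # Hypothetical schema URIs describing the prediction request/response formats.
    model.predict_schemata = Google::Cloud::AIPlatform::V1::PredictSchemata.new(
      instance_schema_uri:   "gs://my-bucket/schemata/instance.yaml",
      parameters_schema_uri: "gs://my-bucket/schemata/parameters.yaml",
      prediction_schema_uri: "gs://my-bucket/schemata/prediction.yaml"
    )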
#satisfies_pzi
def satisfies_pzi() -> ::Boolean
- (::Boolean) — Output only. Reserved for future use.
#satisfies_pzs
def satisfies_pzs() -> ::Boolean
- (::Boolean) — Output only. Reserved for future use.
#supported_deployment_resources_types
def supported_deployment_resources_types() -> ::Array<::Google::Cloud::AIPlatform::V1::Model::DeploymentResourcesType>
- (::Array<::Google::Cloud::AIPlatform::V1::Model::DeploymentResourcesType>) — Output only. When this Model is deployed, its prediction resources are described by the prediction_resources field of the Endpoint.deployed_models object. Because not all Models support all resource configuration types, the configuration types this Model supports are listed here. If no configuration types are listed, the Model cannot be deployed to an Endpoint and does not support online predictions (PredictionService.Predict or PredictionService.Explain). Such a Model can serve predictions by using a BatchPredictionJob, if it has at least one entry each in supported_input_storage_formats and supported_output_storage_formats.
#supported_export_formats
def supported_export_formats() -> ::Array<::Google::Cloud::AIPlatform::V1::Model::ExportFormat>
- (::Array<::Google::Cloud::AIPlatform::V1::Model::ExportFormat>) — Output only. The formats in which this Model may be exported. If empty, this Model is not available for export.
#supported_input_storage_formats
def supported_input_storage_formats() -> ::Array<::String>
- (::Array<::String>) — Output only. The formats this Model supports in BatchPredictionJob.input_config. If PredictSchemata.instance_schema_uri exists, the instances should be given as per that schema.
The possible formats are:
- jsonl — The JSON Lines format, where each instance is a single line. Uses GcsSource.
- csv — The CSV format, where each instance is a single comma-separated line. The first line in the file is the header, containing comma-separated field names. Uses GcsSource.
- tf-record — The TFRecord format, where each instance is a single record in tfrecord syntax. Uses GcsSource.
- tf-record-gzip — Similar to tf-record, but the file is gzipped. Uses GcsSource.
- bigquery — Each instance is a single row in BigQuery. Uses BigQuerySource.
- file-list — Each line of the file is the location of an instance to process. Uses the gcs_source field of the InputConfig object.
If this Model doesn't support any of these formats it means it cannot be used with a BatchPredictionJob. However, if it has supported_deployment_resources_types, it could serve online predictions by using PredictionService.Predict or PredictionService.Explain.
#supported_output_storage_formats
def supported_output_storage_formats() -> ::Array<::String>
- (::Array<::String>) — Output only. The formats this Model supports in BatchPredictionJob.output_config. If both PredictSchemata.instance_schema_uri and PredictSchemata.prediction_schema_uri exist, the predictions are returned together with their instances. In other words, the prediction has the original instance data first, followed by the actual prediction content (as per the schema).
The possible formats are:
- jsonl — The JSON Lines format, where each prediction is a single line. Uses GcsDestination.
- csv — The CSV format, where each prediction is a single comma-separated line. The first line in the file is the header, containing comma-separated field names. Uses GcsDestination.
- bigquery — Each prediction is a single row in a BigQuery table. Uses BigQueryDestination.
If this Model doesn't support any of these formats it means it cannot be used with a BatchPredictionJob. However, if it has supported_deployment_resources_types, it could serve online predictions by using PredictionService.Predict or PredictionService.Explain.
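As a small sketch, a client can check the input and output format lists before configuring a BatchPredictionJob; the format strings are the ones listed above.

    # Guard against models that cannot serve batch predictions in JSON Lines.
    supports_jsonl_batch =
      model.supported_input_storage_formats.include?("jsonl") &&
      model.supported_output_storage_formats.include?("jsonl")

    puts "JSONL batch prediction supported: #{supports_jsonl_batch}"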
#training_pipeline
def training_pipeline() -> ::String
- (::String) — Output only. The resource name of the TrainingPipeline that uploaded this Model, if any.
#update_time
def update_time() -> ::Google::Protobuf::Timestamp
- (::Google::Protobuf::Timestamp) — Output only. Timestamp when this Model was most recently updated.
#version_aliases
def version_aliases() -> ::Array<::String>
- (::Array<::String>) — User provided version aliases so that a model version can be referenced via alias (i.e. projects/{project}/locations/{location}/models/{model_id}@{version_alias} instead of the auto-generated version id, i.e. projects/{project}/locations/{location}/models/{model_id}@{version_id}). The format is [a-z][a-zA-Z0-9-]{0,126}[a-z0-9] to distinguish from version_id. A default version alias will be created for the first version of the model, and there must be exactly one default version alias for a model.
#version_aliases=
def version_aliases=(value) -> ::Array<::String>
- value (::Array<::String>) — User provided version aliases so that a model version can be referenced via alias (i.e. projects/{project}/locations/{location}/models/{model_id}@{version_alias} instead of the auto-generated version id, i.e. projects/{project}/locations/{location}/models/{model_id}@{version_id}). The format is [a-z][a-zA-Z0-9-]{0,126}[a-z0-9] to distinguish from version_id. A default version alias will be created for the first version of the model, and there must be exactly one default version alias for a model.
- (::Array<::String>) — User provided version aliases so that a model version can be referenced via alias (i.e. projects/{project}/locations/{location}/models/{model_id}@{version_alias} instead of the auto-generated version id, i.e. projects/{project}/locations/{location}/models/{model_id}@{version_id}). The format is [a-z][a-zA-Z0-9-]{0,126}[a-z0-9] to distinguish from version_id. A default version alias will be created for the first version of the model, and there must be exactly one default version alias for a model.
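For illustration, an alias and an auto-generated version id are used the same way inside a resource name; the project, location, model id, and alias below are placeholders.

    # The same model version can be addressed either way.
    by_alias = "projects/my-project/locations/us-central1/models/456@production"
    by_id    = "projects/my-project/locations/us-central1/models/456@3"

    # Assuming `client` is a ModelService client as in the earlier etag sketch:
    model = client.get_model name: by_alias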
#version_create_time
def version_create_time() -> ::Google::Protobuf::Timestamp
- (::Google::Protobuf::Timestamp) — Output only. Timestamp when this version was created.
#version_description
def version_description() -> ::String
- (::String) — The description of this version.
#version_description=
def version_description=(value) -> ::String
- value (::String) — The description of this version.
- (::String) — The description of this version.
#version_id
def version_id() -> ::String
- (::String) — Output only. Immutable. The version ID of the model. A new version is committed when a new model version is uploaded or trained under an existing model id. It is an auto-incrementing decimal number in string representation.
#version_update_time
def version_update_time() -> ::Google::Protobuf::Timestamp
- (::Google::Protobuf::Timestamp) — Output only. Timestamp when this version was most recently updated.