public static final class BatchPredictionJob.OutputConfig.Builder extends com.google.protobuf.GeneratedMessageV3.Builder<BatchPredictionJob.OutputConfig.Builder> implements BatchPredictionJob.OutputConfigOrBuilder
Configures the output of [BatchPredictionJob][google.cloud.aiplatform.v1.BatchPredictionJob]. See [Model.supported_output_storage_formats][google.cloud.aiplatform.v1.Model.supported_output_storage_formats] for supported output formats, and how predictions are expressed via any of them.

Protobuf type
google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig

| Modifier and Type | Method and Description |
|---|---|
BatchPredictionJob.OutputConfig.Builder |
addRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) |
BatchPredictionJob.OutputConfig |
build() |
BatchPredictionJob.OutputConfig |
buildPartial() |
BatchPredictionJob.OutputConfig.Builder |
clear() |
BatchPredictionJob.OutputConfig.Builder |
clearBigqueryDestination()
The BigQuery project or dataset location where the output is to be
written to.
|
BatchPredictionJob.OutputConfig.Builder |
clearDestination() |
BatchPredictionJob.OutputConfig.Builder |
clearField(com.google.protobuf.Descriptors.FieldDescriptor field) |
BatchPredictionJob.OutputConfig.Builder |
clearGcsDestination()
The Cloud Storage location of the directory where the output is
to be written to.
|
BatchPredictionJob.OutputConfig.Builder |
clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) |
BatchPredictionJob.OutputConfig.Builder |
clearPredictionsFormat()
Required.
|
BatchPredictionJob.OutputConfig.Builder |
clone() |
BigQueryDestination |
getBigqueryDestination()
The BigQuery project or dataset location where the output is to be
written to.
|
BigQueryDestination.Builder |
getBigqueryDestinationBuilder()
The BigQuery project or dataset location where the output is to be
written to.
|
BigQueryDestinationOrBuilder |
getBigqueryDestinationOrBuilder()
The BigQuery project or dataset location where the output is to be
written to.
|
BatchPredictionJob.OutputConfig |
getDefaultInstanceForType() |
static com.google.protobuf.Descriptors.Descriptor |
getDescriptor() |
com.google.protobuf.Descriptors.Descriptor |
getDescriptorForType() |
BatchPredictionJob.OutputConfig.DestinationCase |
getDestinationCase() |
GcsDestination |
getGcsDestination()
The Cloud Storage location of the directory where the output is
to be written to.
|
GcsDestination.Builder |
getGcsDestinationBuilder()
The Cloud Storage location of the directory where the output is
to be written to.
|
GcsDestinationOrBuilder |
getGcsDestinationOrBuilder()
The Cloud Storage location of the directory where the output is
to be written to.
|
String |
getPredictionsFormat()
Required.
|
com.google.protobuf.ByteString |
getPredictionsFormatBytes()
Required.
|
boolean |
hasBigqueryDestination()
The BigQuery project or dataset location where the output is to be
written to.
|
boolean |
hasGcsDestination()
The Cloud Storage location of the directory where the output is
to be written to.
|
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable |
internalGetFieldAccessorTable() |
boolean |
isInitialized() |
BatchPredictionJob.OutputConfig.Builder |
mergeBigqueryDestination(BigQueryDestination value)
The BigQuery project or dataset location where the output is to be
written to.
|
BatchPredictionJob.OutputConfig.Builder |
mergeFrom(BatchPredictionJob.OutputConfig other) |
BatchPredictionJob.OutputConfig.Builder |
mergeFrom(com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
BatchPredictionJob.OutputConfig.Builder |
mergeFrom(com.google.protobuf.Message other) |
BatchPredictionJob.OutputConfig.Builder |
mergeGcsDestination(GcsDestination value)
The Cloud Storage location of the directory where the output is
to be written to.
|
BatchPredictionJob.OutputConfig.Builder |
mergeUnknownFields(com.google.protobuf.UnknownFieldSet unknownFields) |
BatchPredictionJob.OutputConfig.Builder |
setBigqueryDestination(BigQueryDestination.Builder builderForValue)
The BigQuery project or dataset location where the output is to be
written to.
|
BatchPredictionJob.OutputConfig.Builder |
setBigqueryDestination(BigQueryDestination value)
The BigQuery project or dataset location where the output is to be
written to.
|
BatchPredictionJob.OutputConfig.Builder |
setField(com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) |
BatchPredictionJob.OutputConfig.Builder |
setGcsDestination(GcsDestination.Builder builderForValue)
The Cloud Storage location of the directory where the output is
to be written to.
|
BatchPredictionJob.OutputConfig.Builder |
setGcsDestination(GcsDestination value)
The Cloud Storage location of the directory where the output is
to be written to.
|
BatchPredictionJob.OutputConfig.Builder |
setPredictionsFormat(String value)
Required.
|
BatchPredictionJob.OutputConfig.Builder |
setPredictionsFormatBytes(com.google.protobuf.ByteString value)
Required.
|
BatchPredictionJob.OutputConfig.Builder |
setRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field,
int index,
Object value) |
BatchPredictionJob.OutputConfig.Builder |
setUnknownFields(com.google.protobuf.UnknownFieldSet unknownFields) |
Methods inherited from class com.google.protobuf.GeneratedMessageV3.Builder:
getAllFields, getField, getFieldBuilder, getOneofFieldDescriptor, getParentForChildren, getRepeatedField, getRepeatedFieldBuilder, getRepeatedFieldCount, getUnknownFields, getUnknownFieldSetBuilder, hasField, hasOneof, internalGetMapField, internalGetMapFieldReflection, internalGetMutableMapField, internalGetMutableMapFieldReflection, isClean, markClean, mergeUnknownLengthDelimitedField, mergeUnknownVarintField, newBuilderForField, onBuilt, onChanged, parseUnknownField, setUnknownFieldSetBuilder, setUnknownFieldsProto3

Methods inherited from class com.google.protobuf.AbstractMessage.Builder:
findInitializationErrors, getInitializationErrorString, internalMergeFrom, mergeFrom, mergeFrom, mergeFrom, mergeFrom, mergeFrom, mergeFrom, mergeFrom, mergeFrom, mergeFrom, newUninitializedMessageException, toString

Methods inherited from class com.google.protobuf.AbstractMessageLite.Builder:
addAll, addAll, mergeDelimitedFrom, mergeDelimitedFrom, newUninitializedMessageException

Methods inherited from class java.lang.Object:
equals, finalize, getClass, hashCode, notify, notifyAll, wait, wait, wait

public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()
Overrides: internalGetFieldAccessorTable in class com.google.protobuf.GeneratedMessageV3.Builder<BatchPredictionJob.OutputConfig.Builder>

public BatchPredictionJob.OutputConfig.Builder clear()

Specified by: clear in interface com.google.protobuf.Message.Builder
Specified by: clear in interface com.google.protobuf.MessageLite.Builder
Overrides: clear in class com.google.protobuf.GeneratedMessageV3.Builder<BatchPredictionJob.OutputConfig.Builder>

public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()

Specified by: getDescriptorForType in interface com.google.protobuf.Message.Builder
Specified by: getDescriptorForType in interface com.google.protobuf.MessageOrBuilder
Overrides: getDescriptorForType in class com.google.protobuf.GeneratedMessageV3.Builder<BatchPredictionJob.OutputConfig.Builder>

public BatchPredictionJob.OutputConfig getDefaultInstanceForType()

Specified by: getDefaultInstanceForType in interface com.google.protobuf.MessageLiteOrBuilder
Specified by: getDefaultInstanceForType in interface com.google.protobuf.MessageOrBuilder

public BatchPredictionJob.OutputConfig build()

Specified by: build in interface com.google.protobuf.Message.Builder
Specified by: build in interface com.google.protobuf.MessageLite.Builder

public BatchPredictionJob.OutputConfig buildPartial()

Specified by: buildPartial in interface com.google.protobuf.Message.Builder
Specified by: buildPartial in interface com.google.protobuf.MessageLite.Builder

public BatchPredictionJob.OutputConfig.Builder clone()

Specified by: clone in interface com.google.protobuf.Message.Builder
Specified by: clone in interface com.google.protobuf.MessageLite.Builder
Overrides: clone in class com.google.protobuf.GeneratedMessageV3.Builder<BatchPredictionJob.OutputConfig.Builder>

public BatchPredictionJob.OutputConfig.Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, Object value)

Specified by: setField in interface com.google.protobuf.Message.Builder
Overrides: setField in class com.google.protobuf.GeneratedMessageV3.Builder<BatchPredictionJob.OutputConfig.Builder>

public BatchPredictionJob.OutputConfig.Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field)

Specified by: clearField in interface com.google.protobuf.Message.Builder
Overrides: clearField in class com.google.protobuf.GeneratedMessageV3.Builder<BatchPredictionJob.OutputConfig.Builder>

public BatchPredictionJob.OutputConfig.Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof)

Specified by: clearOneof in interface com.google.protobuf.Message.Builder
Overrides: clearOneof in class com.google.protobuf.GeneratedMessageV3.Builder<BatchPredictionJob.OutputConfig.Builder>

public BatchPredictionJob.OutputConfig.Builder setRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value)

Specified by: setRepeatedField in interface com.google.protobuf.Message.Builder
Overrides: setRepeatedField in class com.google.protobuf.GeneratedMessageV3.Builder<BatchPredictionJob.OutputConfig.Builder>

public BatchPredictionJob.OutputConfig.Builder addRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, Object value)

Specified by: addRepeatedField in interface com.google.protobuf.Message.Builder
Overrides: addRepeatedField in class com.google.protobuf.GeneratedMessageV3.Builder<BatchPredictionJob.OutputConfig.Builder>

public BatchPredictionJob.OutputConfig.Builder mergeFrom(com.google.protobuf.Message other)

Specified by: mergeFrom in interface com.google.protobuf.Message.Builder
Overrides: mergeFrom in class com.google.protobuf.AbstractMessage.Builder<BatchPredictionJob.OutputConfig.Builder>

public BatchPredictionJob.OutputConfig.Builder mergeFrom(BatchPredictionJob.OutputConfig other)
public final boolean isInitialized()
Specified by: isInitialized in interface com.google.protobuf.MessageLiteOrBuilder
Overrides: isInitialized in class com.google.protobuf.GeneratedMessageV3.Builder<BatchPredictionJob.OutputConfig.Builder>

public BatchPredictionJob.OutputConfig.Builder mergeFrom(com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws IOException

Specified by: mergeFrom in interface com.google.protobuf.Message.Builder
Specified by: mergeFrom in interface com.google.protobuf.MessageLite.Builder
Overrides: mergeFrom in class com.google.protobuf.AbstractMessage.Builder<BatchPredictionJob.OutputConfig.Builder>
Throws: IOException

public BatchPredictionJob.OutputConfig.DestinationCase getDestinationCase()

Specified by: getDestinationCase in interface BatchPredictionJob.OutputConfigOrBuilder

public BatchPredictionJob.OutputConfig.Builder clearDestination()
public boolean hasGcsDestination()
The Cloud Storage location of the directory where the output is to be written to. In the given directory a new directory is created. Its name is `prediction-<model-display-name>-<job-create-time>`, where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. Inside of it files `predictions_0001.<extension>`, `predictions_0002.<extension>`, ..., `predictions_N.<extension>` are created where `<extension>` depends on chosen [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format], and N may equal 0001 and depends on the total number of successfully predicted instances. If the Model has both [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri] and [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri] schemata defined then each such file contains predictions as per the [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format]. If prediction for any instance failed (partially or completely), then an additional `errors_0001.<extension>`, `errors_0002.<extension>`,..., `errors_N.<extension>` files are created (N depends on total number of failed predictions). These files contain the failed instances, as per their schema, followed by an additional `error` field which as value has [google.rpc.Status][google.rpc.Status] containing only `code` and `message` fields.
.google.cloud.aiplatform.v1.GcsDestination gcs_destination = 2;
Specified by: hasGcsDestination in interface BatchPredictionJob.OutputConfigOrBuilder

public GcsDestination getGcsDestination()
The Cloud Storage location of the directory where the output is to be written to. In the given directory a new directory is created. Its name is `prediction-<model-display-name>-<job-create-time>`, where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. Inside of it files `predictions_0001.<extension>`, `predictions_0002.<extension>`, ..., `predictions_N.<extension>` are created where `<extension>` depends on chosen [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format], and N may equal 0001 and depends on the total number of successfully predicted instances. If the Model has both [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri] and [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri] schemata defined then each such file contains predictions as per the [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format]. If prediction for any instance failed (partially or completely), then an additional `errors_0001.<extension>`, `errors_0002.<extension>`,..., `errors_N.<extension>` files are created (N depends on total number of failed predictions). These files contain the failed instances, as per their schema, followed by an additional `error` field which as value has [google.rpc.Status][google.rpc.Status] containing only `code` and `message` fields.
.google.cloud.aiplatform.v1.GcsDestination gcs_destination = 2;
Specified by: getGcsDestination in interface BatchPredictionJob.OutputConfigOrBuilder

public BatchPredictionJob.OutputConfig.Builder setGcsDestination(GcsDestination value)
The Cloud Storage location of the directory where the output is to be written to. In the given directory a new directory is created. Its name is `prediction-<model-display-name>-<job-create-time>`, where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. Inside of it files `predictions_0001.<extension>`, `predictions_0002.<extension>`, ..., `predictions_N.<extension>` are created where `<extension>` depends on chosen [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format], and N may equal 0001 and depends on the total number of successfully predicted instances. If the Model has both [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri] and [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri] schemata defined then each such file contains predictions as per the [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format]. If prediction for any instance failed (partially or completely), then an additional `errors_0001.<extension>`, `errors_0002.<extension>`,..., `errors_N.<extension>` files are created (N depends on total number of failed predictions). These files contain the failed instances, as per their schema, followed by an additional `error` field which as value has [google.rpc.Status][google.rpc.Status] containing only `code` and `message` fields.
.google.cloud.aiplatform.v1.GcsDestination gcs_destination = 2;

public BatchPredictionJob.OutputConfig.Builder setGcsDestination(GcsDestination.Builder builderForValue)
The Cloud Storage location of the directory where the output is to be written to. In the given directory a new directory is created. Its name is `prediction-<model-display-name>-<job-create-time>`, where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. Inside of it files `predictions_0001.<extension>`, `predictions_0002.<extension>`, ..., `predictions_N.<extension>` are created where `<extension>` depends on chosen [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format], and N may equal 0001 and depends on the total number of successfully predicted instances. If the Model has both [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri] and [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri] schemata defined then each such file contains predictions as per the [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format]. If prediction for any instance failed (partially or completely), then an additional `errors_0001.<extension>`, `errors_0002.<extension>`,..., `errors_N.<extension>` files are created (N depends on total number of failed predictions). These files contain the failed instances, as per their schema, followed by an additional `error` field which as value has [google.rpc.Status][google.rpc.Status] containing only `code` and `message` fields.
.google.cloud.aiplatform.v1.GcsDestination gcs_destination = 2;

public BatchPredictionJob.OutputConfig.Builder mergeGcsDestination(GcsDestination value)
The Cloud Storage location of the directory where the output is to be written to. In the given directory a new directory is created. Its name is `prediction-<model-display-name>-<job-create-time>`, where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. Inside of it files `predictions_0001.<extension>`, `predictions_0002.<extension>`, ..., `predictions_N.<extension>` are created where `<extension>` depends on chosen [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format], and N may equal 0001 and depends on the total number of successfully predicted instances. If the Model has both [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri] and [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri] schemata defined then each such file contains predictions as per the [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format]. If prediction for any instance failed (partially or completely), then an additional `errors_0001.<extension>`, `errors_0002.<extension>`,..., `errors_N.<extension>` files are created (N depends on total number of failed predictions). These files contain the failed instances, as per their schema, followed by an additional `error` field which as value has [google.rpc.Status][google.rpc.Status] containing only `code` and `message` fields.
.google.cloud.aiplatform.v1.GcsDestination gcs_destination = 2;

public BatchPredictionJob.OutputConfig.Builder clearGcsDestination()
The Cloud Storage location of the directory where the output is to be written to. In the given directory a new directory is created. Its name is `prediction-<model-display-name>-<job-create-time>`, where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. Inside of it files `predictions_0001.<extension>`, `predictions_0002.<extension>`, ..., `predictions_N.<extension>` are created where `<extension>` depends on chosen [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format], and N may equal 0001 and depends on the total number of successfully predicted instances. If the Model has both [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri] and [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri] schemata defined then each such file contains predictions as per the [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format]. If prediction for any instance failed (partially or completely), then an additional `errors_0001.<extension>`, `errors_0002.<extension>`,..., `errors_N.<extension>` files are created (N depends on total number of failed predictions). These files contain the failed instances, as per their schema, followed by an additional `error` field which as value has [google.rpc.Status][google.rpc.Status] containing only `code` and `message` fields.
.google.cloud.aiplatform.v1.GcsDestination gcs_destination = 2;

public GcsDestination.Builder getGcsDestinationBuilder()
The Cloud Storage location of the directory where the output is to be written to. In the given directory a new directory is created. Its name is `prediction-<model-display-name>-<job-create-time>`, where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. Inside of it files `predictions_0001.<extension>`, `predictions_0002.<extension>`, ..., `predictions_N.<extension>` are created where `<extension>` depends on chosen [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format], and N may equal 0001 and depends on the total number of successfully predicted instances. If the Model has both [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri] and [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri] schemata defined then each such file contains predictions as per the [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format]. If prediction for any instance failed (partially or completely), then an additional `errors_0001.<extension>`, `errors_0002.<extension>`,..., `errors_N.<extension>` files are created (N depends on total number of failed predictions). These files contain the failed instances, as per their schema, followed by an additional `error` field which as value has [google.rpc.Status][google.rpc.Status] containing only `code` and `message` fields.
.google.cloud.aiplatform.v1.GcsDestination gcs_destination = 2;

public GcsDestinationOrBuilder getGcsDestinationOrBuilder()
The Cloud Storage location of the directory where the output is to be written to. In the given directory a new directory is created. Its name is `prediction-<model-display-name>-<job-create-time>`, where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. Inside of it files `predictions_0001.<extension>`, `predictions_0002.<extension>`, ..., `predictions_N.<extension>` are created where `<extension>` depends on chosen [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format], and N may equal 0001 and depends on the total number of successfully predicted instances. If the Model has both [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri] and [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri] schemata defined then each such file contains predictions as per the [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format]. If prediction for any instance failed (partially or completely), then an additional `errors_0001.<extension>`, `errors_0002.<extension>`,..., `errors_N.<extension>` files are created (N depends on total number of failed predictions). These files contain the failed instances, as per their schema, followed by an additional `error` field which as value has [google.rpc.Status][google.rpc.Status] containing only `code` and `message` fields.
.google.cloud.aiplatform.v1.GcsDestination gcs_destination = 2;
Specified by: getGcsDestinationOrBuilder in interface BatchPredictionJob.OutputConfigOrBuilder

public boolean hasBigqueryDestination()
The BigQuery project or dataset location where the output is to be written to. If project is provided, a new dataset is created with name `prediction_<model-display-name>_<job-create-time>` where <model-display-name> is made BigQuery-dataset-name compatible (for example, most special characters become underscores), and timestamp is in YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset two tables will be created, `predictions`, and `errors`. If the Model has both [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri] and [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri] schemata defined then the tables have columns as follows: The `predictions` table contains instances for which the prediction succeeded, it has columns as per a concatenation of the Model's instance and prediction schemata. The `errors` table contains rows for which the prediction has failed, it has instance columns, as per the instance schema, followed by a single "errors" column, which as values has [google.rpc.Status][google.rpc.Status] represented as a STRUCT, and containing only `code` and `message`.
.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;
Specified by: hasBigqueryDestination in interface BatchPredictionJob.OutputConfigOrBuilder

public BigQueryDestination getBigqueryDestination()
The BigQuery project or dataset location where the output is to be written to. If project is provided, a new dataset is created with name `prediction_<model-display-name>_<job-create-time>` where <model-display-name> is made BigQuery-dataset-name compatible (for example, most special characters become underscores), and timestamp is in YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset two tables will be created, `predictions`, and `errors`. If the Model has both [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri] and [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri] schemata defined then the tables have columns as follows: The `predictions` table contains instances for which the prediction succeeded, it has columns as per a concatenation of the Model's instance and prediction schemata. The `errors` table contains rows for which the prediction has failed, it has instance columns, as per the instance schema, followed by a single "errors" column, which as values has [google.rpc.Status][google.rpc.Status] represented as a STRUCT, and containing only `code` and `message`.
.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;
Specified by: getBigqueryDestination in interface BatchPredictionJob.OutputConfigOrBuilder

public BatchPredictionJob.OutputConfig.Builder setBigqueryDestination(BigQueryDestination value)
The BigQuery project or dataset location where the output is to be written to. If project is provided, a new dataset is created with name `prediction_<model-display-name>_<job-create-time>` where <model-display-name> is made BigQuery-dataset-name compatible (for example, most special characters become underscores), and timestamp is in YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset two tables will be created, `predictions`, and `errors`. If the Model has both [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri] and [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri] schemata defined then the tables have columns as follows: The `predictions` table contains instances for which the prediction succeeded, it has columns as per a concatenation of the Model's instance and prediction schemata. The `errors` table contains rows for which the prediction has failed, it has instance columns, as per the instance schema, followed by a single "errors" column, which as values has [google.rpc.Status][google.rpc.Status] represented as a STRUCT, and containing only `code` and `message`.
.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;

public BatchPredictionJob.OutputConfig.Builder setBigqueryDestination(BigQueryDestination.Builder builderForValue)
The BigQuery project or dataset location where the output is to be written to. If project is provided, a new dataset is created with name `prediction_<model-display-name>_<job-create-time>` where <model-display-name> is made BigQuery-dataset-name compatible (for example, most special characters become underscores), and timestamp is in YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset two tables will be created, `predictions`, and `errors`. If the Model has both [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri] and [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri] schemata defined then the tables have columns as follows: The `predictions` table contains instances for which the prediction succeeded, it has columns as per a concatenation of the Model's instance and prediction schemata. The `errors` table contains rows for which the prediction has failed, it has instance columns, as per the instance schema, followed by a single "errors" column, which as values has [google.rpc.Status][google.rpc.Status] represented as a STRUCT, and containing only `code` and `message`.
.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;

public BatchPredictionJob.OutputConfig.Builder mergeBigqueryDestination(BigQueryDestination value)
The BigQuery project or dataset location where the output is to be written to. If project is provided, a new dataset is created with name `prediction_<model-display-name>_<job-create-time>` where <model-display-name> is made BigQuery-dataset-name compatible (for example, most special characters become underscores), and timestamp is in YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset two tables will be created, `predictions`, and `errors`. If the Model has both [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri] and [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri] schemata defined then the tables have columns as follows: The `predictions` table contains instances for which the prediction succeeded, it has columns as per a concatenation of the Model's instance and prediction schemata. The `errors` table contains rows for which the prediction has failed, it has instance columns, as per the instance schema, followed by a single "errors" column, which as values has [google.rpc.Status][google.rpc.Status] represented as a STRUCT, and containing only `code` and `message`.
.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;

public BatchPredictionJob.OutputConfig.Builder clearBigqueryDestination()
The BigQuery project or dataset location where the output is to be written to. If project is provided, a new dataset is created with name `prediction_<model-display-name>_<job-create-time>` where <model-display-name> is made BigQuery-dataset-name compatible (for example, most special characters become underscores), and timestamp is in YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset two tables will be created, `predictions`, and `errors`. If the Model has both [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri] and [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri] schemata defined then the tables have columns as follows: The `predictions` table contains instances for which the prediction succeeded, it has columns as per a concatenation of the Model's instance and prediction schemata. The `errors` table contains rows for which the prediction has failed, it has instance columns, as per the instance schema, followed by a single "errors" column, which as values has [google.rpc.Status][google.rpc.Status] represented as a STRUCT, and containing only `code` and `message`.
.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;

public BigQueryDestination.Builder getBigqueryDestinationBuilder()
The BigQuery project or dataset location where the output is to be written to. If project is provided, a new dataset is created with name `prediction_<model-display-name>_<job-create-time>` where <model-display-name> is made BigQuery-dataset-name compatible (for example, most special characters become underscores), and timestamp is in YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset two tables will be created, `predictions`, and `errors`. If the Model has both [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri] and [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri] schemata defined then the tables have columns as follows: The `predictions` table contains instances for which the prediction succeeded, it has columns as per a concatenation of the Model's instance and prediction schemata. The `errors` table contains rows for which the prediction has failed, it has instance columns, as per the instance schema, followed by a single "errors" column, which as values has [google.rpc.Status][google.rpc.Status] represented as a STRUCT, and containing only `code` and `message`.
.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;

public BigQueryDestinationOrBuilder getBigqueryDestinationOrBuilder()
The BigQuery project or dataset location where the output is to be written to. If project is provided, a new dataset is created with name `prediction_<model-display-name>_<job-create-time>` where <model-display-name> is made BigQuery-dataset-name compatible (for example, most special characters become underscores), and timestamp is in YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset two tables will be created, `predictions`, and `errors`. If the Model has both [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri] and [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri] schemata defined then the tables have columns as follows: The `predictions` table contains instances for which the prediction succeeded, it has columns as per a concatenation of the Model's instance and prediction schemata. The `errors` table contains rows for which the prediction has failed, it has instance columns, as per the instance schema, followed by a single "errors" column, which as values has [google.rpc.Status][google.rpc.Status] represented as a STRUCT, and containing only `code` and `message`.
.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;
Specified by: getBigqueryDestinationOrBuilder in interface BatchPredictionJob.OutputConfigOrBuilder

public String getPredictionsFormat()
Required. The format in which Vertex AI gives the predictions, must be one of the [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model] [supported_output_storage_formats][google.cloud.aiplatform.v1.Model.supported_output_storage_formats].
string predictions_format = 1 [(.google.api.field_behavior) = REQUIRED];
Specified by: getPredictionsFormat in interface BatchPredictionJob.OutputConfigOrBuilder

public com.google.protobuf.ByteString getPredictionsFormatBytes()
Required. The format in which Vertex AI gives the predictions, must be one of the [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model] [supported_output_storage_formats][google.cloud.aiplatform.v1.Model.supported_output_storage_formats].
string predictions_format = 1 [(.google.api.field_behavior) = REQUIRED];
Specified by: getPredictionsFormatBytes in interface BatchPredictionJob.OutputConfigOrBuilder

public BatchPredictionJob.OutputConfig.Builder setPredictionsFormat(String value)
Required. The format in which Vertex AI gives the predictions, must be one of the [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model] [supported_output_storage_formats][google.cloud.aiplatform.v1.Model.supported_output_storage_formats].
string predictions_format = 1 [(.google.api.field_behavior) = REQUIRED];
Parameters: value - The predictionsFormat to set.

public BatchPredictionJob.OutputConfig.Builder clearPredictionsFormat()
Required. The format in which Vertex AI gives the predictions, must be one of the [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model] [supported_output_storage_formats][google.cloud.aiplatform.v1.Model.supported_output_storage_formats].
string predictions_format = 1 [(.google.api.field_behavior) = REQUIRED];

public BatchPredictionJob.OutputConfig.Builder setPredictionsFormatBytes(com.google.protobuf.ByteString value)
Required. The format in which Vertex AI gives the predictions, must be one of the [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model] [supported_output_storage_formats][google.cloud.aiplatform.v1.Model.supported_output_storage_formats].
string predictions_format = 1 [(.google.api.field_behavior) = REQUIRED];
Parameters: value - The bytes for predictionsFormat to set.

public final BatchPredictionJob.OutputConfig.Builder setUnknownFields(com.google.protobuf.UnknownFieldSet unknownFields)
Specified by: setUnknownFields in interface com.google.protobuf.Message.Builder
Overrides: setUnknownFields in class com.google.protobuf.GeneratedMessageV3.Builder<BatchPredictionJob.OutputConfig.Builder>

public final BatchPredictionJob.OutputConfig.Builder mergeUnknownFields(com.google.protobuf.UnknownFieldSet unknownFields)
Specified by: mergeUnknownFields in interface com.google.protobuf.Message.Builder
Overrides: mergeUnknownFields in class com.google.protobuf.GeneratedMessageV3.Builder<BatchPredictionJob.OutputConfig.Builder>

Copyright © 2024 Google LLC. All rights reserved.