public static final class BatchPredictionJob.OutputConfig extends com.google.protobuf.GeneratedMessageV3 implements BatchPredictionJob.OutputConfigOrBuilder
Configures the output of [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. See [Model.supported_output_storage_formats][google.cloud.aiplatform.v1beta1.Model.supported_output_storage_formats] for supported output formats, and how predictions are expressed via any of them.

Protobuf type: google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig

| Modifier and Type | Class and Description |
|---|---|
static class |
BatchPredictionJob.OutputConfig.Builder
Configures the output of
[BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
|
static class |
BatchPredictionJob.OutputConfig.DestinationCase |
com.google.protobuf.GeneratedMessageV3.BuilderParent, com.google.protobuf.GeneratedMessageV3.ExtendableBuilder<MessageT extends com.google.protobuf.GeneratedMessageV3.ExtendableMessage<MessageT>,BuilderT extends com.google.protobuf.GeneratedMessageV3.ExtendableBuilder<MessageT,BuilderT>>, com.google.protobuf.GeneratedMessageV3.ExtendableMessage<MessageT extends com.google.protobuf.GeneratedMessageV3.ExtendableMessage<MessageT>>, com.google.protobuf.GeneratedMessageV3.ExtendableMessageOrBuilder<MessageT extends com.google.protobuf.GeneratedMessageV3.ExtendableMessage<MessageT>>, com.google.protobuf.GeneratedMessageV3.FieldAccessorTable, com.google.protobuf.GeneratedMessageV3.UnusedPrivateParameter

| Modifier and Type | Field and Description |
|---|---|
static int |
BIGQUERY_DESTINATION_FIELD_NUMBER |
static int |
GCS_DESTINATION_FIELD_NUMBER |
static int |
PREDICTIONS_FORMAT_FIELD_NUMBER |
| Modifier and Type | Method and Description |
|---|---|
boolean |
equals(Object obj) |
BigQueryDestination |
getBigqueryDestination()
The BigQuery project or dataset location where the output is to be
written to.
|
BigQueryDestinationOrBuilder |
getBigqueryDestinationOrBuilder()
The BigQuery project or dataset location where the output is to be
written to.
|
static BatchPredictionJob.OutputConfig |
getDefaultInstance() |
BatchPredictionJob.OutputConfig |
getDefaultInstanceForType() |
static com.google.protobuf.Descriptors.Descriptor |
getDescriptor() |
BatchPredictionJob.OutputConfig.DestinationCase |
getDestinationCase() |
GcsDestination |
getGcsDestination()
The Cloud Storage location of the directory where the output is
to be written to.
|
GcsDestinationOrBuilder |
getGcsDestinationOrBuilder()
The Cloud Storage location of the directory where the output is
to be written to.
|
com.google.protobuf.Parser<BatchPredictionJob.OutputConfig> |
getParserForType() |
String |
getPredictionsFormat()
Required.
|
com.google.protobuf.ByteString |
getPredictionsFormatBytes()
Required.
|
int |
getSerializedSize() |
boolean |
hasBigqueryDestination()
The BigQuery project or dataset location where the output is to be
written to.
|
boolean |
hasGcsDestination()
The Cloud Storage location of the directory where the output is
to be written to.
|
int |
hashCode() |
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable |
internalGetFieldAccessorTable() |
boolean |
isInitialized() |
static BatchPredictionJob.OutputConfig.Builder |
newBuilder() |
static BatchPredictionJob.OutputConfig.Builder |
newBuilder(BatchPredictionJob.OutputConfig prototype) |
BatchPredictionJob.OutputConfig.Builder |
newBuilderForType() |
protected BatchPredictionJob.OutputConfig.Builder |
newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) |
protected Object |
newInstance(com.google.protobuf.GeneratedMessageV3.UnusedPrivateParameter unused) |
static BatchPredictionJob.OutputConfig |
parseDelimitedFrom(InputStream input) |
static BatchPredictionJob.OutputConfig |
parseDelimitedFrom(InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
static BatchPredictionJob.OutputConfig |
parseFrom(byte[] data) |
static BatchPredictionJob.OutputConfig |
parseFrom(byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
static BatchPredictionJob.OutputConfig |
parseFrom(ByteBuffer data) |
static BatchPredictionJob.OutputConfig |
parseFrom(ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
static BatchPredictionJob.OutputConfig |
parseFrom(com.google.protobuf.ByteString data) |
static BatchPredictionJob.OutputConfig |
parseFrom(com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
static BatchPredictionJob.OutputConfig |
parseFrom(com.google.protobuf.CodedInputStream input) |
static BatchPredictionJob.OutputConfig |
parseFrom(com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
static BatchPredictionJob.OutputConfig |
parseFrom(InputStream input) |
static BatchPredictionJob.OutputConfig |
parseFrom(InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
static com.google.protobuf.Parser<BatchPredictionJob.OutputConfig> |
parser() |
BatchPredictionJob.OutputConfig.Builder |
toBuilder() |
void |
writeTo(com.google.protobuf.CodedOutputStream output) |
Methods inherited from class com.google.protobuf.GeneratedMessageV3: canUseUnsafe, computeStringSize, computeStringSizeNoTag, emptyBooleanList, emptyDoubleList, emptyFloatList, emptyIntList, emptyList, emptyLongList, getAllFields, getDescriptorForType, getField, getOneofFieldDescriptor, getRepeatedField, getRepeatedFieldCount, getUnknownFields, hasField, hasOneof, internalGetMapField, internalGetMapFieldReflection, isStringEmpty, makeExtensionsImmutable, makeMutableCopy, makeMutableCopy, mergeFromAndMakeImmutableInternal, mutableCopy, mutableCopy, mutableCopy, mutableCopy, mutableCopy, newBooleanList, newBuilderForType, newDoubleList, newFloatList, newIntList, newLongList, parseDelimitedWithIOException, parseDelimitedWithIOException, parseUnknownField, parseUnknownFieldProto3, parseWithIOException, parseWithIOException, parseWithIOException, parseWithIOException, serializeBooleanMapTo, serializeIntegerMapTo, serializeLongMapTo, serializeStringMapTo, writeReplace, writeString, writeStringNoTag

Methods inherited from class com.google.protobuf.AbstractMessage: findInitializationErrors, getInitializationErrorString, hashBoolean, hashEnum, hashEnumList, hashFields, hashLong, toString

Methods inherited from class com.google.protobuf.AbstractMessageLite: addAll, addAll, checkByteStringIsUtf8, toByteArray, toByteString, writeDelimitedTo, writeTo

Methods inherited from class java.lang.Object: clone, finalize, getClass, notify, notifyAll, wait, wait, wait

public static final int GCS_DESTINATION_FIELD_NUMBER
public static final int BIGQUERY_DESTINATION_FIELD_NUMBER
public static final int PREDICTIONS_FORMAT_FIELD_NUMBER
protected Object newInstance(com.google.protobuf.GeneratedMessageV3.UnusedPrivateParameter unused)
newInstance in class com.google.protobuf.GeneratedMessageV3

public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()
internalGetFieldAccessorTable in class com.google.protobuf.GeneratedMessageV3

public BatchPredictionJob.OutputConfig.DestinationCase getDestinationCase()

getDestinationCase in interface BatchPredictionJob.OutputConfigOrBuilder

public boolean hasGcsDestination()
The Cloud Storage location of the directory where the output is to be written to. In the given directory a new directory is created. Its name is `prediction-<model-display-name>-<job-create-time>`, where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. Inside of it files `predictions_0001.<extension>`, `predictions_0002.<extension>`, ..., `predictions_N.<extension>` are created where `<extension>` depends on chosen [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format], and N may equal 0001 and depends on the total number of successfully predicted instances. If the Model has both [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] and [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] schemata defined then each such file contains predictions as per the [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format]. If prediction for any instance failed (partially or completely), then an additional `errors_0001.<extension>`, `errors_0002.<extension>`,..., `errors_N.<extension>` files are created (N depends on total number of failed predictions). These files contain the failed instances, as per their schema, followed by an additional `error` field which as value has [google.rpc.Status][google.rpc.Status] containing only `code` and `message` fields.
.google.cloud.aiplatform.v1beta1.GcsDestination gcs_destination = 2;

hasGcsDestination in interface BatchPredictionJob.OutputConfigOrBuilder

public GcsDestination getGcsDestination()
The Cloud Storage location of the directory where the output is to be written to. In the given directory a new directory is created. Its name is `prediction-<model-display-name>-<job-create-time>`, where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. Inside of it files `predictions_0001.<extension>`, `predictions_0002.<extension>`, ..., `predictions_N.<extension>` are created where `<extension>` depends on chosen [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format], and N may equal 0001 and depends on the total number of successfully predicted instances. If the Model has both [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] and [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] schemata defined then each such file contains predictions as per the [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format]. If prediction for any instance failed (partially or completely), then an additional `errors_0001.<extension>`, `errors_0002.<extension>`,..., `errors_N.<extension>` files are created (N depends on total number of failed predictions). These files contain the failed instances, as per their schema, followed by an additional `error` field which as value has [google.rpc.Status][google.rpc.Status] containing only `code` and `message` fields.
.google.cloud.aiplatform.v1beta1.GcsDestination gcs_destination = 2;

getGcsDestination in interface BatchPredictionJob.OutputConfigOrBuilder

public GcsDestinationOrBuilder getGcsDestinationOrBuilder()
The Cloud Storage location of the directory where the output is to be written to. In the given directory a new directory is created. Its name is `prediction-<model-display-name>-<job-create-time>`, where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. Inside of it files `predictions_0001.<extension>`, `predictions_0002.<extension>`, ..., `predictions_N.<extension>` are created where `<extension>` depends on chosen [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format], and N may equal 0001 and depends on the total number of successfully predicted instances. If the Model has both [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] and [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] schemata defined then each such file contains predictions as per the [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format]. If prediction for any instance failed (partially or completely), then an additional `errors_0001.<extension>`, `errors_0002.<extension>`,..., `errors_N.<extension>` files are created (N depends on total number of failed predictions). These files contain the failed instances, as per their schema, followed by an additional `error` field which as value has [google.rpc.Status][google.rpc.Status] containing only `code` and `message` fields.
.google.cloud.aiplatform.v1beta1.GcsDestination gcs_destination = 2;

getGcsDestinationOrBuilder in interface BatchPredictionJob.OutputConfigOrBuilder

public boolean hasBigqueryDestination()
The BigQuery project or dataset location where the output is to be written to. If project is provided, a new dataset is created with name `prediction_<model-display-name>_<job-create-time>` where <model-display-name> is made BigQuery-dataset-name compatible (for example, most special characters become underscores), and timestamp is in YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset two tables will be created, `predictions`, and `errors`. If the Model has both [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] and [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] schemata defined then the tables have columns as follows: The `predictions` table contains instances for which the prediction succeeded, it has columns as per a concatenation of the Model's instance and prediction schemata. The `errors` table contains rows for which the prediction has failed, it has instance columns, as per the instance schema, followed by a single "errors" column, which as values has [google.rpc.Status][google.rpc.Status] represented as a STRUCT, and containing only `code` and `message`.
.google.cloud.aiplatform.v1beta1.BigQueryDestination bigquery_destination = 3;

hasBigqueryDestination in interface BatchPredictionJob.OutputConfigOrBuilder

public BigQueryDestination getBigqueryDestination()
The BigQuery project or dataset location where the output is to be written to. If project is provided, a new dataset is created with name `prediction_<model-display-name>_<job-create-time>` where <model-display-name> is made BigQuery-dataset-name compatible (for example, most special characters become underscores), and timestamp is in YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset two tables will be created, `predictions`, and `errors`. If the Model has both [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] and [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] schemata defined then the tables have columns as follows: The `predictions` table contains instances for which the prediction succeeded, it has columns as per a concatenation of the Model's instance and prediction schemata. The `errors` table contains rows for which the prediction has failed, it has instance columns, as per the instance schema, followed by a single "errors" column, which as values has [google.rpc.Status][google.rpc.Status] represented as a STRUCT, and containing only `code` and `message`.
.google.cloud.aiplatform.v1beta1.BigQueryDestination bigquery_destination = 3;

getBigqueryDestination in interface BatchPredictionJob.OutputConfigOrBuilder

public BigQueryDestinationOrBuilder getBigqueryDestinationOrBuilder()
The BigQuery project or dataset location where the output is to be written to. If project is provided, a new dataset is created with name `prediction_<model-display-name>_<job-create-time>` where <model-display-name> is made BigQuery-dataset-name compatible (for example, most special characters become underscores), and timestamp is in YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset two tables will be created, `predictions`, and `errors`. If the Model has both [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] and [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] schemata defined then the tables have columns as follows: The `predictions` table contains instances for which the prediction succeeded, it has columns as per a concatenation of the Model's instance and prediction schemata. The `errors` table contains rows for which the prediction has failed, it has instance columns, as per the instance schema, followed by a single "errors" column, which as values has [google.rpc.Status][google.rpc.Status] represented as a STRUCT, and containing only `code` and `message`.
.google.cloud.aiplatform.v1beta1.BigQueryDestination bigquery_destination = 3;

getBigqueryDestinationOrBuilder in interface BatchPredictionJob.OutputConfigOrBuilder

public String getPredictionsFormat()
Required. The format in which Vertex AI gives the predictions, must be one of the [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] [supported_output_storage_formats][google.cloud.aiplatform.v1beta1.Model.supported_output_storage_formats].
string predictions_format = 1 [(.google.api.field_behavior) = REQUIRED];

getPredictionsFormat in interface BatchPredictionJob.OutputConfigOrBuilder

public com.google.protobuf.ByteString getPredictionsFormatBytes()
Required. The format in which Vertex AI gives the predictions, must be one of the [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] [supported_output_storage_formats][google.cloud.aiplatform.v1beta1.Model.supported_output_storage_formats].
string predictions_format = 1 [(.google.api.field_behavior) = REQUIRED];

getPredictionsFormatBytes in interface BatchPredictionJob.OutputConfigOrBuilder

public final boolean isInitialized()

isInitialized in interface com.google.protobuf.MessageLiteOrBuilder
isInitialized in class com.google.protobuf.GeneratedMessageV3

public void writeTo(com.google.protobuf.CodedOutputStream output)
throws IOException
writeTo in interface com.google.protobuf.MessageLite
writeTo in class com.google.protobuf.GeneratedMessageV3
Throws: IOException

public int getSerializedSize()

getSerializedSize in interface com.google.protobuf.MessageLite
getSerializedSize in class com.google.protobuf.GeneratedMessageV3

public boolean equals(Object obj)

equals in interface com.google.protobuf.Message
equals in class com.google.protobuf.AbstractMessage

public int hashCode()

hashCode in interface com.google.protobuf.Message
hashCode in class com.google.protobuf.AbstractMessage

public static BatchPredictionJob.OutputConfig parseFrom(ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException
Throws: com.google.protobuf.InvalidProtocolBufferException

public static BatchPredictionJob.OutputConfig parseFrom(ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException

Throws: com.google.protobuf.InvalidProtocolBufferException

public static BatchPredictionJob.OutputConfig parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException

Throws: com.google.protobuf.InvalidProtocolBufferException

public static BatchPredictionJob.OutputConfig parseFrom(com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException

Throws: com.google.protobuf.InvalidProtocolBufferException

public static BatchPredictionJob.OutputConfig parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException

Throws: com.google.protobuf.InvalidProtocolBufferException

public static BatchPredictionJob.OutputConfig parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException

Throws: com.google.protobuf.InvalidProtocolBufferException

public static BatchPredictionJob.OutputConfig parseFrom(InputStream input) throws IOException

Throws: IOException

public static BatchPredictionJob.OutputConfig parseFrom(InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws IOException

Throws: IOException

public static BatchPredictionJob.OutputConfig parseDelimitedFrom(InputStream input) throws IOException

Throws: IOException

public static BatchPredictionJob.OutputConfig parseDelimitedFrom(InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws IOException

Throws: IOException

public static BatchPredictionJob.OutputConfig parseFrom(com.google.protobuf.CodedInputStream input) throws IOException

Throws: IOException

public static BatchPredictionJob.OutputConfig parseFrom(com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws IOException

Throws: IOException

public BatchPredictionJob.OutputConfig.Builder newBuilderForType()
newBuilderForType in interface com.google.protobuf.Message
newBuilderForType in interface com.google.protobuf.MessageLite

public static BatchPredictionJob.OutputConfig.Builder newBuilder()
public static BatchPredictionJob.OutputConfig.Builder newBuilder(BatchPredictionJob.OutputConfig prototype)
public BatchPredictionJob.OutputConfig.Builder toBuilder()
toBuilder in interface com.google.protobuf.Message
toBuilder in interface com.google.protobuf.MessageLite

protected BatchPredictionJob.OutputConfig.Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent)

newBuilderForType in class com.google.protobuf.GeneratedMessageV3

public static BatchPredictionJob.OutputConfig getDefaultInstance()
public static com.google.protobuf.Parser<BatchPredictionJob.OutputConfig> parser()
public com.google.protobuf.Parser<BatchPredictionJob.OutputConfig> getParserForType()
getParserForType in interface com.google.protobuf.Message
getParserForType in interface com.google.protobuf.MessageLite
getParserForType in class com.google.protobuf.GeneratedMessageV3

public BatchPredictionJob.OutputConfig getDefaultInstanceForType()

getDefaultInstanceForType in interface com.google.protobuf.MessageLiteOrBuilder
getDefaultInstanceForType in interface com.google.protobuf.MessageOrBuilder

Copyright © 2024 Google LLC. All rights reserved.