diff --git a/java-aiplatform/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/JobServiceClientTest.java b/java-aiplatform/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/JobServiceClientTest.java index 2fdc4bde2013..a3da34decd27 100644 --- a/java-aiplatform/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/JobServiceClientTest.java +++ b/java-aiplatform/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/JobServiceClientTest.java @@ -1547,6 +1547,7 @@ public void createBatchPredictionJobTest() throws Exception { .setModelVersionId("modelVersionId-2006125846") .setUnmanagedContainerModel(UnmanagedContainerModel.newBuilder().build()) .setInputConfig(BatchPredictionJob.InputConfig.newBuilder().build()) + .setInstanceConfig(BatchPredictionJob.InstanceConfig.newBuilder().build()) .setModelParameters(Value.newBuilder().setBoolValue(true).build()) .setOutputConfig(BatchPredictionJob.OutputConfig.newBuilder().build()) .setDedicatedResources(BatchDedicatedResources.newBuilder().build()) @@ -1618,6 +1619,7 @@ public void createBatchPredictionJobTest2() throws Exception { .setModelVersionId("modelVersionId-2006125846") .setUnmanagedContainerModel(UnmanagedContainerModel.newBuilder().build()) .setInputConfig(BatchPredictionJob.InputConfig.newBuilder().build()) + .setInstanceConfig(BatchPredictionJob.InstanceConfig.newBuilder().build()) .setModelParameters(Value.newBuilder().setBoolValue(true).build()) .setOutputConfig(BatchPredictionJob.OutputConfig.newBuilder().build()) .setDedicatedResources(BatchDedicatedResources.newBuilder().build()) @@ -1689,6 +1691,7 @@ public void getBatchPredictionJobTest() throws Exception { .setModelVersionId("modelVersionId-2006125846") .setUnmanagedContainerModel(UnmanagedContainerModel.newBuilder().build()) .setInputConfig(BatchPredictionJob.InputConfig.newBuilder().build()) + .setInstanceConfig(BatchPredictionJob.InstanceConfig.newBuilder().build()) 
.setModelParameters(Value.newBuilder().setBoolValue(true).build()) .setOutputConfig(BatchPredictionJob.OutputConfig.newBuilder().build()) .setDedicatedResources(BatchDedicatedResources.newBuilder().build()) @@ -1759,6 +1762,7 @@ public void getBatchPredictionJobTest2() throws Exception { .setModelVersionId("modelVersionId-2006125846") .setUnmanagedContainerModel(UnmanagedContainerModel.newBuilder().build()) .setInputConfig(BatchPredictionJob.InputConfig.newBuilder().build()) + .setInstanceConfig(BatchPredictionJob.InstanceConfig.newBuilder().build()) .setModelParameters(Value.newBuilder().setBoolValue(true).build()) .setOutputConfig(BatchPredictionJob.OutputConfig.newBuilder().build()) .setDedicatedResources(BatchDedicatedResources.newBuilder().build()) diff --git a/java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJob.java b/java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJob.java index 6fe26fc9ba88..4fdac5056bc3 100644 --- a/java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJob.java +++ b/java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJob.java @@ -1563,6 +1563,2094 @@ public com.google.protobuf.Parser getParserForType() { } } + public interface InstanceConfigOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+     * The format of the instance that the Model accepts. Vertex AI will
+     * convert compatible
+     * [batch prediction input instance formats][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.instances_format]
+     * to the specified format.
+     * Supported values are:
+     * * `object`: Each input is converted to JSON object format.
+     *     * For `bigquery`, each row is converted to an object.
+     *     * For `jsonl`, each line of the JSONL input must be an object.
+     *     * Does not apply to `csv`, `file-list`, `tf-record`, or
+     *       `tf-record-gzip`.
+     * * `array`: Each input is converted to JSON array format.
+     *     * For `bigquery`, each row is converted to an array. The order
+     *       of columns is determined by the BigQuery column order, unless
+     *       [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] is populated.
+     *       [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be populated for specifying field orders.
+     *     * For `jsonl`, if each line of the JSONL input is an object,
+     *       [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be populated for specifying field orders.
+     *     * Does not apply to `csv`, `file-list`, `tf-record`, or
+     *       `tf-record-gzip`.
+     * If not specified, Vertex AI converts the batch prediction input as
+     * follows:
+     *  * For `bigquery` and `csv`, the behavior is the same as `array`. The
+     *    order of columns is the same as defined in the file or table, unless
+     *    [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] is populated.
+     *  * For `jsonl`, the prediction instance format is determined by
+     *    each line of the input.
+     *  * For `tf-record`/`tf-record-gzip`, each record will be converted to
+     *    an object in the format of `{"b64": <value>}`, where `<value>` is
+     *    the Base64-encoded string of the content of the record.
+     *  * For `file-list`, each file in the list will be converted to an
+     *    object in the format of `{"b64": <value>}`, where `<value>` is
+     *    the Base64-encoded string of the content of the file.
+     * 
+ * + * string instance_type = 1; + * + * @return The instanceType. + */ + java.lang.String getInstanceType(); + /** + * + * + *
+     * The format of the instance that the Model accepts. Vertex AI will
+     * convert compatible
+     * [batch prediction input instance formats][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.instances_format]
+     * to the specified format.
+     * Supported values are:
+     * * `object`: Each input is converted to JSON object format.
+     *     * For `bigquery`, each row is converted to an object.
+     *     * For `jsonl`, each line of the JSONL input must be an object.
+     *     * Does not apply to `csv`, `file-list`, `tf-record`, or
+     *       `tf-record-gzip`.
+     * * `array`: Each input is converted to JSON array format.
+     *     * For `bigquery`, each row is converted to an array. The order
+     *       of columns is determined by the BigQuery column order, unless
+     *       [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] is populated.
+     *       [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be populated for specifying field orders.
+     *     * For `jsonl`, if each line of the JSONL input is an object,
+     *       [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be populated for specifying field orders.
+     *     * Does not apply to `csv`, `file-list`, `tf-record`, or
+     *       `tf-record-gzip`.
+     * If not specified, Vertex AI converts the batch prediction input as
+     * follows:
+     *  * For `bigquery` and `csv`, the behavior is the same as `array`. The
+     *    order of columns is the same as defined in the file or table, unless
+     *    [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] is populated.
+     *  * For `jsonl`, the prediction instance format is determined by
+     *    each line of the input.
+     *  * For `tf-record`/`tf-record-gzip`, each record will be converted to
+     *    an object in the format of `{"b64": <value>}`, where `<value>` is
+     *    the Base64-encoded string of the content of the record.
+     *  * For `file-list`, each file in the list will be converted to an
+     *    object in the format of `{"b64": <value>}`, where `<value>` is
+     *    the Base64-encoded string of the content of the file.
+     * 
+ * + * string instance_type = 1; + * + * @return The bytes for instanceType. + */ + com.google.protobuf.ByteString getInstanceTypeBytes(); + + /** + * + * + *
+     * The name of the field that is considered as a key.
+     * The values identified by the key field are not included in the transformed
+     * instances that are sent to the Model. This is similar to
+     * specifying this name of the field in [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields]. In addition,
+     * the batch prediction output will not include the instances. Instead the
+     * output will only include the value of the key field, in a field named
+     * `key` in the output:
+     *  * For `jsonl` output format, the output will have a `key` field
+     *    instead of the `instance` field.
+     *  * For `csv`/`bigquery` output format, the output will have a `key`
+     *    column instead of the instance feature columns.
+     * The input must be JSONL with objects at each line, CSV, BigQuery
+     * or TfRecord.
+     * 
+ * + * string key_field = 2; + * + * @return The keyField. + */ + java.lang.String getKeyField(); + /** + * + * + *
+     * The name of the field that is considered as a key.
+     * The values identified by the key field are not included in the transformed
+     * instances that are sent to the Model. This is similar to
+     * specifying this name of the field in [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields]. In addition,
+     * the batch prediction output will not include the instances. Instead the
+     * output will only include the value of the key field, in a field named
+     * `key` in the output:
+     *  * For `jsonl` output format, the output will have a `key` field
+     *    instead of the `instance` field.
+     *  * For `csv`/`bigquery` output format, the output will have a `key`
+     *    column instead of the instance feature columns.
+     * The input must be JSONL with objects at each line, CSV, BigQuery
+     * or TfRecord.
+     * 
+ * + * string key_field = 2; + * + * @return The bytes for keyField. + */ + com.google.protobuf.ByteString getKeyFieldBytes(); + + /** + * + * + *
+     * Fields that will be included in the prediction instance that is
+     * sent to the Model.
+     * If [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] is `array`, the order of field names in
+     * included_fields also determines the order of the values in the array.
+     * When included_fields is populated, [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] must be empty.
+     * The input must be JSONL with objects at each line, CSV, BigQuery
+     * or TfRecord.
+     * 
+ * + * repeated string included_fields = 3; + * + * @return A list containing the includedFields. + */ + java.util.List getIncludedFieldsList(); + /** + * + * + *
+     * Fields that will be included in the prediction instance that is
+     * sent to the Model.
+     * If [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] is `array`, the order of field names in
+     * included_fields also determines the order of the values in the array.
+     * When included_fields is populated, [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] must be empty.
+     * The input must be JSONL with objects at each line, CSV, BigQuery
+     * or TfRecord.
+     * 
+ * + * repeated string included_fields = 3; + * + * @return The count of includedFields. + */ + int getIncludedFieldsCount(); + /** + * + * + *
+     * Fields that will be included in the prediction instance that is
+     * sent to the Model.
+     * If [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] is `array`, the order of field names in
+     * included_fields also determines the order of the values in the array.
+     * When included_fields is populated, [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] must be empty.
+     * The input must be JSONL with objects at each line, CSV, BigQuery
+     * or TfRecord.
+     * 
+ * + * repeated string included_fields = 3; + * + * @param index The index of the element to return. + * @return The includedFields at the given index. + */ + java.lang.String getIncludedFields(int index); + /** + * + * + *
+     * Fields that will be included in the prediction instance that is
+     * sent to the Model.
+     * If [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] is `array`, the order of field names in
+     * included_fields also determines the order of the values in the array.
+     * When included_fields is populated, [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] must be empty.
+     * The input must be JSONL with objects at each line, CSV, BigQuery
+     * or TfRecord.
+     * 
+ * + * repeated string included_fields = 3; + * + * @param index The index of the value to return. + * @return The bytes of the includedFields at the given index. + */ + com.google.protobuf.ByteString getIncludedFieldsBytes(int index); + + /** + * + * + *
+     * Fields that will be excluded in the prediction instance that is
+     * sent to the Model.
+     * Excluded will be attached to the batch prediction output if [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field]
+     * is not specified.
+     * When excluded_fields is populated, [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be empty.
+     * The input must be JSONL with objects at each line, CSV, BigQuery
+     * or TfRecord.
+     * 
+ * + * repeated string excluded_fields = 4; + * + * @return A list containing the excludedFields. + */ + java.util.List getExcludedFieldsList(); + /** + * + * + *
+     * Fields that will be excluded in the prediction instance that is
+     * sent to the Model.
+     * Excluded will be attached to the batch prediction output if [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field]
+     * is not specified.
+     * When excluded_fields is populated, [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be empty.
+     * The input must be JSONL with objects at each line, CSV, BigQuery
+     * or TfRecord.
+     * 
+ * + * repeated string excluded_fields = 4; + * + * @return The count of excludedFields. + */ + int getExcludedFieldsCount(); + /** + * + * + *
+     * Fields that will be excluded in the prediction instance that is
+     * sent to the Model.
+     * Excluded will be attached to the batch prediction output if [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field]
+     * is not specified.
+     * When excluded_fields is populated, [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be empty.
+     * The input must be JSONL with objects at each line, CSV, BigQuery
+     * or TfRecord.
+     * 
+ * + * repeated string excluded_fields = 4; + * + * @param index The index of the element to return. + * @return The excludedFields at the given index. + */ + java.lang.String getExcludedFields(int index); + /** + * + * + *
+     * Fields that will be excluded in the prediction instance that is
+     * sent to the Model.
+     * Excluded will be attached to the batch prediction output if [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field]
+     * is not specified.
+     * When excluded_fields is populated, [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be empty.
+     * The input must be JSONL with objects at each line, CSV, BigQuery
+     * or TfRecord.
+     * 
+ * + * repeated string excluded_fields = 4; + * + * @param index The index of the value to return. + * @return The bytes of the excludedFields at the given index. + */ + com.google.protobuf.ByteString getExcludedFieldsBytes(int index); + } + /** + * + * + *
+   * Configuration defining how to transform batch prediction input instances to
+   * the instances that the Model accepts.
+   * 
+ * + * Protobuf type {@code google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig} + */ + public static final class InstanceConfig extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig) + InstanceConfigOrBuilder { + private static final long serialVersionUID = 0L; + // Use InstanceConfig.newBuilder() to construct. + private InstanceConfig(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private InstanceConfig() { + instanceType_ = ""; + keyField_ = ""; + includedFields_ = com.google.protobuf.LazyStringArrayList.EMPTY; + excludedFields_ = com.google.protobuf.LazyStringArrayList.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new InstanceConfig(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto + .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_InstanceConfig_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto + .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_InstanceConfig_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.class, + com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.Builder.class); + } + + public static final int INSTANCE_TYPE_FIELD_NUMBER = 1; + private volatile java.lang.Object instanceType_; + /** + * + * + *
+     * The format of the instance that the Model accepts. Vertex AI will
+     * convert compatible
+     * [batch prediction input instance formats][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.instances_format]
+     * to the specified format.
+     * Supported values are:
+     * * `object`: Each input is converted to JSON object format.
+     *     * For `bigquery`, each row is converted to an object.
+     *     * For `jsonl`, each line of the JSONL input must be an object.
+     *     * Does not apply to `csv`, `file-list`, `tf-record`, or
+     *       `tf-record-gzip`.
+     * * `array`: Each input is converted to JSON array format.
+     *     * For `bigquery`, each row is converted to an array. The order
+     *       of columns is determined by the BigQuery column order, unless
+     *       [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] is populated.
+     *       [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be populated for specifying field orders.
+     *     * For `jsonl`, if each line of the JSONL input is an object,
+     *       [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be populated for specifying field orders.
+     *     * Does not apply to `csv`, `file-list`, `tf-record`, or
+     *       `tf-record-gzip`.
+     * If not specified, Vertex AI converts the batch prediction input as
+     * follows:
+     *  * For `bigquery` and `csv`, the behavior is the same as `array`. The
+     *    order of columns is the same as defined in the file or table, unless
+     *    [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] is populated.
+     *  * For `jsonl`, the prediction instance format is determined by
+     *    each line of the input.
+     *  * For `tf-record`/`tf-record-gzip`, each record will be converted to
+     *    an object in the format of `{"b64": <value>}`, where `<value>` is
+     *    the Base64-encoded string of the content of the record.
+     *  * For `file-list`, each file in the list will be converted to an
+     *    object in the format of `{"b64": <value>}`, where `<value>` is
+     *    the Base64-encoded string of the content of the file.
+     * 
+ * + * string instance_type = 1; + * + * @return The instanceType. + */ + @java.lang.Override + public java.lang.String getInstanceType() { + java.lang.Object ref = instanceType_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + instanceType_ = s; + return s; + } + } + /** + * + * + *
+     * The format of the instance that the Model accepts. Vertex AI will
+     * convert compatible
+     * [batch prediction input instance formats][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.instances_format]
+     * to the specified format.
+     * Supported values are:
+     * * `object`: Each input is converted to JSON object format.
+     *     * For `bigquery`, each row is converted to an object.
+     *     * For `jsonl`, each line of the JSONL input must be an object.
+     *     * Does not apply to `csv`, `file-list`, `tf-record`, or
+     *       `tf-record-gzip`.
+     * * `array`: Each input is converted to JSON array format.
+     *     * For `bigquery`, each row is converted to an array. The order
+     *       of columns is determined by the BigQuery column order, unless
+     *       [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] is populated.
+     *       [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be populated for specifying field orders.
+     *     * For `jsonl`, if each line of the JSONL input is an object,
+     *       [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be populated for specifying field orders.
+     *     * Does not apply to `csv`, `file-list`, `tf-record`, or
+     *       `tf-record-gzip`.
+     * If not specified, Vertex AI converts the batch prediction input as
+     * follows:
+     *  * For `bigquery` and `csv`, the behavior is the same as `array`. The
+     *    order of columns is the same as defined in the file or table, unless
+     *    [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] is populated.
+     *  * For `jsonl`, the prediction instance format is determined by
+     *    each line of the input.
+     *  * For `tf-record`/`tf-record-gzip`, each record will be converted to
+     *    an object in the format of `{"b64": <value>}`, where `<value>` is
+     *    the Base64-encoded string of the content of the record.
+     *  * For `file-list`, each file in the list will be converted to an
+     *    object in the format of `{"b64": <value>}`, where `<value>` is
+     *    the Base64-encoded string of the content of the file.
+     * 
+ * + * string instance_type = 1; + * + * @return The bytes for instanceType. + */ + @java.lang.Override + public com.google.protobuf.ByteString getInstanceTypeBytes() { + java.lang.Object ref = instanceType_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + instanceType_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int KEY_FIELD_FIELD_NUMBER = 2; + private volatile java.lang.Object keyField_; + /** + * + * + *
+     * The name of the field that is considered as a key.
+     * The values identified by the key field are not included in the transformed
+     * instances that are sent to the Model. This is similar to
+     * specifying this name of the field in [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields]. In addition,
+     * the batch prediction output will not include the instances. Instead the
+     * output will only include the value of the key field, in a field named
+     * `key` in the output:
+     *  * For `jsonl` output format, the output will have a `key` field
+     *    instead of the `instance` field.
+     *  * For `csv`/`bigquery` output format, the output will have a `key`
+     *    column instead of the instance feature columns.
+     * The input must be JSONL with objects at each line, CSV, BigQuery
+     * or TfRecord.
+     * 
+ * + * string key_field = 2; + * + * @return The keyField. + */ + @java.lang.Override + public java.lang.String getKeyField() { + java.lang.Object ref = keyField_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + keyField_ = s; + return s; + } + } + /** + * + * + *
+     * The name of the field that is considered as a key.
+     * The values identified by the key field are not included in the transformed
+     * instances that are sent to the Model. This is similar to
+     * specifying this name of the field in [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields]. In addition,
+     * the batch prediction output will not include the instances. Instead the
+     * output will only include the value of the key field, in a field named
+     * `key` in the output:
+     *  * For `jsonl` output format, the output will have a `key` field
+     *    instead of the `instance` field.
+     *  * For `csv`/`bigquery` output format, the output will have a `key`
+     *    column instead of the instance feature columns.
+     * The input must be JSONL with objects at each line, CSV, BigQuery
+     * or TfRecord.
+     * 
+ * + * string key_field = 2; + * + * @return The bytes for keyField. + */ + @java.lang.Override + public com.google.protobuf.ByteString getKeyFieldBytes() { + java.lang.Object ref = keyField_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + keyField_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int INCLUDED_FIELDS_FIELD_NUMBER = 3; + private com.google.protobuf.LazyStringList includedFields_; + /** + * + * + *
+     * Fields that will be included in the prediction instance that is
+     * sent to the Model.
+     * If [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] is `array`, the order of field names in
+     * included_fields also determines the order of the values in the array.
+     * When included_fields is populated, [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] must be empty.
+     * The input must be JSONL with objects at each line, CSV, BigQuery
+     * or TfRecord.
+     * 
+ * + * repeated string included_fields = 3; + * + * @return A list containing the includedFields. + */ + public com.google.protobuf.ProtocolStringList getIncludedFieldsList() { + return includedFields_; + } + /** + * + * + *
+     * Fields that will be included in the prediction instance that is
+     * sent to the Model.
+     * If [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] is `array`, the order of field names in
+     * included_fields also determines the order of the values in the array.
+     * When included_fields is populated, [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] must be empty.
+     * The input must be JSONL with objects at each line, CSV, BigQuery
+     * or TfRecord.
+     * 
+ * + * repeated string included_fields = 3; + * + * @return The count of includedFields. + */ + public int getIncludedFieldsCount() { + return includedFields_.size(); + } + /** + * + * + *
+     * Fields that will be included in the prediction instance that is
+     * sent to the Model.
+     * If [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] is `array`, the order of field names in
+     * included_fields also determines the order of the values in the array.
+     * When included_fields is populated, [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] must be empty.
+     * The input must be JSONL with objects at each line, CSV, BigQuery
+     * or TfRecord.
+     * 
+ * + * repeated string included_fields = 3; + * + * @param index The index of the element to return. + * @return The includedFields at the given index. + */ + public java.lang.String getIncludedFields(int index) { + return includedFields_.get(index); + } + /** + * + * + *
+     * Fields that will be included in the prediction instance that is
+     * sent to the Model.
+     * If [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] is `array`, the order of field names in
+     * included_fields also determines the order of the values in the array.
+     * When included_fields is populated, [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] must be empty.
+     * The input must be JSONL with objects at each line, CSV, BigQuery
+     * or TfRecord.
+     * 
+ * + * repeated string included_fields = 3; + * + * @param index The index of the value to return. + * @return The bytes of the includedFields at the given index. + */ + public com.google.protobuf.ByteString getIncludedFieldsBytes(int index) { + return includedFields_.getByteString(index); + } + + public static final int EXCLUDED_FIELDS_FIELD_NUMBER = 4; + private com.google.protobuf.LazyStringList excludedFields_; + /** + * + * + *
+     * Fields that will be excluded in the prediction instance that is
+     * sent to the Model.
+     * Excluded will be attached to the batch prediction output if [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field]
+     * is not specified.
+     * When excluded_fields is populated, [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be empty.
+     * The input must be JSONL with objects at each line, CSV, BigQuery
+     * or TfRecord.
+     * 
+ * + * repeated string excluded_fields = 4; + * + * @return A list containing the excludedFields. + */ + public com.google.protobuf.ProtocolStringList getExcludedFieldsList() { + return excludedFields_; + } + /** + * + * + *
+     * Fields that will be excluded in the prediction instance that is
+     * sent to the Model.
+     * Excluded will be attached to the batch prediction output if [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field]
+     * is not specified.
+     * When excluded_fields is populated, [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be empty.
+     * The input must be JSONL with objects at each line, CSV, BigQuery
+     * or TfRecord.
+     * 
+ * + * repeated string excluded_fields = 4; + * + * @return The count of excludedFields. + */ + public int getExcludedFieldsCount() { + return excludedFields_.size(); + } + /** + * + * + *
+     * Fields that will be excluded in the prediction instance that is
+     * sent to the Model.
+     * Excluded will be attached to the batch prediction output if [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field]
+     * is not specified.
+     * When excluded_fields is populated, [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be empty.
+     * The input must be JSONL with objects at each line, CSV, BigQuery
+     * or TfRecord.
+     * 
+ * + * repeated string excluded_fields = 4; + * + * @param index The index of the element to return. + * @return The excludedFields at the given index. + */ + public java.lang.String getExcludedFields(int index) { + return excludedFields_.get(index); + } + /** + * + * + *
+     * Fields that will be excluded in the prediction instance that is
+     * sent to the Model.
+     * Excluded will be attached to the batch prediction output if [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field]
+     * is not specified.
+     * When excluded_fields is populated, [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be empty.
+     * The input must be JSONL with objects at each line, CSV, BigQuery
+     * or TfRecord.
+     * 
+ * + * repeated string excluded_fields = 4; + * + * @param index The index of the value to return. + * @return The bytes of the excludedFields at the given index. + */ + public com.google.protobuf.ByteString getExcludedFieldsBytes(int index) { + return excludedFields_.getByteString(index); + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceType_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, instanceType_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(keyField_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, keyField_); + } + for (int i = 0; i < includedFields_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, includedFields_.getRaw(i)); + } + for (int i = 0; i < excludedFields_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, excludedFields_.getRaw(i)); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceType_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, instanceType_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(keyField_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, keyField_); + } + { + int dataSize = 0; + for (int i = 0; i < includedFields_.size(); i++) { + dataSize += computeStringSizeNoTag(includedFields_.getRaw(i)); + } + size += dataSize; + size += 1 * 
getIncludedFieldsList().size(); + } + { + int dataSize = 0; + for (int i = 0; i < excludedFields_.size(); i++) { + dataSize += computeStringSizeNoTag(excludedFields_.getRaw(i)); + } + size += dataSize; + size += 1 * getExcludedFieldsList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig)) { + return super.equals(obj); + } + com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig other = + (com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig) obj; + + if (!getInstanceType().equals(other.getInstanceType())) return false; + if (!getKeyField().equals(other.getKeyField())) return false; + if (!getIncludedFieldsList().equals(other.getIncludedFieldsList())) return false; + if (!getExcludedFieldsList().equals(other.getExcludedFieldsList())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + INSTANCE_TYPE_FIELD_NUMBER; + hash = (53 * hash) + getInstanceType().hashCode(); + hash = (37 * hash) + KEY_FIELD_FIELD_NUMBER; + hash = (53 * hash) + getKeyField().hashCode(); + if (getIncludedFieldsCount() > 0) { + hash = (37 * hash) + INCLUDED_FIELDS_FIELD_NUMBER; + hash = (53 * hash) + getIncludedFieldsList().hashCode(); + } + if (getExcludedFieldsCount() > 0) { + hash = (37 * hash) + EXCLUDED_FIELDS_FIELD_NUMBER; + hash = (53 * hash) + getExcludedFieldsList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static 
com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig parseFrom( + byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig + parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig + parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+     * Configuration defining how to transform batch prediction input instances to
+     * the instances that the Model accepts.
+     * 
+ * + * Protobuf type {@code google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig} + */ + public static final class Builder + extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig) + com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfigOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto + .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_InstanceConfig_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto + .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_InstanceConfig_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.class, + com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.Builder + .class); + } + + // Construct using + // com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + instanceType_ = ""; + + keyField_ = ""; + + includedFields_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + excludedFields_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto + 
.internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_InstanceConfig_descriptor; + } + + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig + getDefaultInstanceForType() { + return com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig + .getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig build() { + com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig result = + buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig buildPartial() { + com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig result = + new com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig(this); + int from_bitField0_ = bitField0_; + result.instanceType_ = instanceType_; + result.keyField_ = keyField_; + if (((bitField0_ & 0x00000001) != 0)) { + includedFields_ = includedFields_.getUnmodifiableView(); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.includedFields_ = includedFields_; + if (((bitField0_ & 0x00000002) != 0)) { + excludedFields_ = excludedFields_.getUnmodifiableView(); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.excludedFields_ = excludedFields_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return 
super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, + java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other + instanceof com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig) { + return mergeFrom( + (com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom( + com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig other) { + if (other + == com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig + .getDefaultInstance()) return this; + if (!other.getInstanceType().isEmpty()) { + instanceType_ = other.instanceType_; + onChanged(); + } + if (!other.getKeyField().isEmpty()) { + keyField_ = other.keyField_; + onChanged(); + } + if (!other.includedFields_.isEmpty()) { + if (includedFields_.isEmpty()) { + includedFields_ = other.includedFields_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureIncludedFieldsIsMutable(); + includedFields_.addAll(other.includedFields_); + } + onChanged(); + } + if (!other.excludedFields_.isEmpty()) { + if (excludedFields_.isEmpty()) { + excludedFields_ = other.excludedFields_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureExcludedFieldsIsMutable(); + excludedFields_.addAll(other.excludedFields_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + instanceType_ = input.readStringRequireUtf8(); + + break; + } // case 10 + case 18: + { + keyField_ = input.readStringRequireUtf8(); + + break; + } // case 18 + case 26: + { + java.lang.String s = input.readStringRequireUtf8(); + ensureIncludedFieldsIsMutable(); + includedFields_.add(s); + break; + } // case 26 + case 34: + { + java.lang.String s = input.readStringRequireUtf8(); + ensureExcludedFieldsIsMutable(); + excludedFields_.add(s); + break; + } // case 34 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private java.lang.Object instanceType_ = ""; + /** + * + * + *
+       * The format of the instance that the Model accepts. Vertex AI will
+       * convert compatible
+       * [batch prediction input instance formats][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.instances_format]
+       * to the specified format.
+       * Supported values are:
+       * * `object`: Each input is converted to JSON object format.
+       *     * For `bigquery`, each row is converted to an object.
+       *     * For `jsonl`, each line of the JSONL input must be an object.
+       *     * Does not apply to `csv`, `file-list`, `tf-record`, or
+       *       `tf-record-gzip`.
+       * * `array`: Each input is converted to JSON array format.
+       *     * For `bigquery`, each row is converted to an array. The order
+       *       of columns is determined by the BigQuery column order, unless
+       *       [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] is populated.
+       *       [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be populated for specifying field orders.
+       *     * For `jsonl`, if each line of the JSONL input is an object,
+       *       [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be populated for specifying field orders.
+       *     * Does not apply to `csv`, `file-list`, `tf-record`, or
+       *       `tf-record-gzip`.
+       * If not specified, Vertex AI converts the batch prediction input as
+       * follows:
+       *  * For `bigquery` and `csv`, the behavior is the same as `array`. The
+       *    order of columns is the same as defined in the file or table, unless
+       *    [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] is populated.
+       *  * For `jsonl`, the prediction instance format is determined by
+       *    each line of the input.
+       *  * For `tf-record`/`tf-record-gzip`, each record will be converted to
+       *    an object in the format of `{"b64": <value>}`, where `<value>` is
+       *    the Base64-encoded string of the content of the record.
+       *  * For `file-list`, each file in the list will be converted to an
+       *    object in the format of `{"b64": <value>}`, where `<value>` is
+       *    the Base64-encoded string of the content of the file.
+       * 
+ * + * string instance_type = 1; + * + * @return The instanceType. + */ + public java.lang.String getInstanceType() { + java.lang.Object ref = instanceType_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + instanceType_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+       * The format of the instance that the Model accepts. Vertex AI will
+       * convert compatible
+       * [batch prediction input instance formats][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.instances_format]
+       * to the specified format.
+       * Supported values are:
+       * * `object`: Each input is converted to JSON object format.
+       *     * For `bigquery`, each row is converted to an object.
+       *     * For `jsonl`, each line of the JSONL input must be an object.
+       *     * Does not apply to `csv`, `file-list`, `tf-record`, or
+       *       `tf-record-gzip`.
+       * * `array`: Each input is converted to JSON array format.
+       *     * For `bigquery`, each row is converted to an array. The order
+       *       of columns is determined by the BigQuery column order, unless
+       *       [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] is populated.
+       *       [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be populated for specifying field orders.
+       *     * For `jsonl`, if each line of the JSONL input is an object,
+       *       [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be populated for specifying field orders.
+       *     * Does not apply to `csv`, `file-list`, `tf-record`, or
+       *       `tf-record-gzip`.
+       * If not specified, Vertex AI converts the batch prediction input as
+       * follows:
+       *  * For `bigquery` and `csv`, the behavior is the same as `array`. The
+       *    order of columns is the same as defined in the file or table, unless
+       *    [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] is populated.
+       *  * For `jsonl`, the prediction instance format is determined by
+       *    each line of the input.
+       *  * For `tf-record`/`tf-record-gzip`, each record will be converted to
+       *    an object in the format of `{"b64": <value>}`, where `<value>` is
+       *    the Base64-encoded string of the content of the record.
+       *  * For `file-list`, each file in the list will be converted to an
+       *    object in the format of `{"b64": <value>}`, where `<value>` is
+       *    the Base64-encoded string of the content of the file.
+       * 
+ * + * string instance_type = 1; + * + * @return The bytes for instanceType. + */ + public com.google.protobuf.ByteString getInstanceTypeBytes() { + java.lang.Object ref = instanceType_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + instanceType_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+       * The format of the instance that the Model accepts. Vertex AI will
+       * convert compatible
+       * [batch prediction input instance formats][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.instances_format]
+       * to the specified format.
+       * Supported values are:
+       * * `object`: Each input is converted to JSON object format.
+       *     * For `bigquery`, each row is converted to an object.
+       *     * For `jsonl`, each line of the JSONL input must be an object.
+       *     * Does not apply to `csv`, `file-list`, `tf-record`, or
+       *       `tf-record-gzip`.
+       * * `array`: Each input is converted to JSON array format.
+       *     * For `bigquery`, each row is converted to an array. The order
+       *       of columns is determined by the BigQuery column order, unless
+       *       [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] is populated.
+       *       [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be populated for specifying field orders.
+       *     * For `jsonl`, if each line of the JSONL input is an object,
+       *       [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be populated for specifying field orders.
+       *     * Does not apply to `csv`, `file-list`, `tf-record`, or
+       *       `tf-record-gzip`.
+       * If not specified, Vertex AI converts the batch prediction input as
+       * follows:
+       *  * For `bigquery` and `csv`, the behavior is the same as `array`. The
+       *    order of columns is the same as defined in the file or table, unless
+       *    [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] is populated.
+       *  * For `jsonl`, the prediction instance format is determined by
+       *    each line of the input.
+       *  * For `tf-record`/`tf-record-gzip`, each record will be converted to
+       *    an object in the format of `{"b64": <value>}`, where `<value>` is
+       *    the Base64-encoded string of the content of the record.
+       *  * For `file-list`, each file in the list will be converted to an
+       *    object in the format of `{"b64": <value>}`, where `<value>` is
+       *    the Base64-encoded string of the content of the file.
+       * 
+ * + * string instance_type = 1; + * + * @param value The instanceType to set. + * @return This builder for chaining. + */ + public Builder setInstanceType(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + instanceType_ = value; + onChanged(); + return this; + } + /** + * + * + *
+       * The format of the instance that the Model accepts. Vertex AI will
+       * convert compatible
+       * [batch prediction input instance formats][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.instances_format]
+       * to the specified format.
+       * Supported values are:
+       * * `object`: Each input is converted to JSON object format.
+       *     * For `bigquery`, each row is converted to an object.
+       *     * For `jsonl`, each line of the JSONL input must be an object.
+       *     * Does not apply to `csv`, `file-list`, `tf-record`, or
+       *       `tf-record-gzip`.
+       * * `array`: Each input is converted to JSON array format.
+       *     * For `bigquery`, each row is converted to an array. The order
+       *       of columns is determined by the BigQuery column order, unless
+       *       [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] is populated.
+       *       [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be populated for specifying field orders.
+       *     * For `jsonl`, if each line of the JSONL input is an object,
+       *       [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be populated for specifying field orders.
+       *     * Does not apply to `csv`, `file-list`, `tf-record`, or
+       *       `tf-record-gzip`.
+       * If not specified, Vertex AI converts the batch prediction input as
+       * follows:
+       *  * For `bigquery` and `csv`, the behavior is the same as `array`. The
+       *    order of columns is the same as defined in the file or table, unless
+       *    [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] is populated.
+       *  * For `jsonl`, the prediction instance format is determined by
+       *    each line of the input.
+       *  * For `tf-record`/`tf-record-gzip`, each record will be converted to
+       *    an object in the format of `{"b64": <value>}`, where `<value>` is
+       *    the Base64-encoded string of the content of the record.
+       *  * For `file-list`, each file in the list will be converted to an
+       *    object in the format of `{"b64": <value>}`, where `<value>` is
+       *    the Base64-encoded string of the content of the file.
+       * 
+ * + * string instance_type = 1; + * + * @return This builder for chaining. + */ + public Builder clearInstanceType() { + + instanceType_ = getDefaultInstance().getInstanceType(); + onChanged(); + return this; + } + /** + * + * + *
+       * The format of the instance that the Model accepts. Vertex AI will
+       * convert compatible
+       * [batch prediction input instance formats][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.instances_format]
+       * to the specified format.
+       * Supported values are:
+       * * `object`: Each input is converted to JSON object format.
+       *     * For `bigquery`, each row is converted to an object.
+       *     * For `jsonl`, each line of the JSONL input must be an object.
+       *     * Does not apply to `csv`, `file-list`, `tf-record`, or
+       *       `tf-record-gzip`.
+       * * `array`: Each input is converted to JSON array format.
+       *     * For `bigquery`, each row is converted to an array. The order
+       *       of columns is determined by the BigQuery column order, unless
+       *       [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] is populated.
+       *       [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be populated for specifying field orders.
+       *     * For `jsonl`, if each line of the JSONL input is an object,
+       *       [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be populated for specifying field orders.
+       *     * Does not apply to `csv`, `file-list`, `tf-record`, or
+       *       `tf-record-gzip`.
+       * If not specified, Vertex AI converts the batch prediction input as
+       * follows:
+       *  * For `bigquery` and `csv`, the behavior is the same as `array`. The
+       *    order of columns is the same as defined in the file or table, unless
+       *    [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] is populated.
+       *  * For `jsonl`, the prediction instance format is determined by
+       *    each line of the input.
+       *  * For `tf-record`/`tf-record-gzip`, each record will be converted to
+       *    an object in the format of `{"b64": <value>}`, where `<value>` is
+       *    the Base64-encoded string of the content of the record.
+       *  * For `file-list`, each file in the list will be converted to an
+       *    object in the format of `{"b64": <value>}`, where `<value>` is
+       *    the Base64-encoded string of the content of the file.
+       * 
+ * + * string instance_type = 1; + * + * @param value The bytes for instanceType to set. + * @return This builder for chaining. + */ + public Builder setInstanceTypeBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + instanceType_ = value; + onChanged(); + return this; + } + + private java.lang.Object keyField_ = ""; + /** + * + * + *
+       * The name of the field that is considered as a key.
+       * The values identified by the key field is not included in the transformed
+       * instances that is sent to the Model. This is similar to
+       * specifying this name of the field in [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields]. In addition,
+       * the batch prediction output will not include the instances. Instead the
+       * output will only include the value of the key field, in a field named
+       * `key` in the output:
+       *  * For `jsonl` output format, the output will have a `key` field
+       *    instead of the `instance` field.
+       *  * For `csv`/`bigquery` output format, the output will have have a `key`
+       *    column instead of the instance feature columns.
+       * The input must be JSONL with objects at each line, CSV, BigQuery
+       * or TfRecord.
+       * 
+ * + * string key_field = 2; + * + * @return The keyField. + */ + public java.lang.String getKeyField() { + java.lang.Object ref = keyField_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + keyField_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+       * The name of the field that is considered as a key.
+       * The values identified by the key field are not included in the transformed
+       * instances that are sent to the Model. This is similar to
+       * specifying this name of the field in [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields]. In addition,
+       * the batch prediction output will not include the instances. Instead the
+       * output will only include the value of the key field, in a field named
+       * `key` in the output:
+       *  * For `jsonl` output format, the output will have a `key` field
+       *    instead of the `instance` field.
+       *  * For `csv`/`bigquery` output format, the output will have a `key`
+       *    column instead of the instance feature columns.
+       * The input must be JSONL with objects at each line, CSV, BigQuery
+       * or TfRecord.
+       * 
+ * + * string key_field = 2; + * + * @return The bytes for keyField. + */ + public com.google.protobuf.ByteString getKeyFieldBytes() { + java.lang.Object ref = keyField_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + keyField_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+       * The name of the field that is considered as a key.
+       * The values identified by the key field are not included in the transformed
+       * instances that are sent to the Model. This is similar to
+       * specifying this name of the field in [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields]. In addition,
+       * the batch prediction output will not include the instances. Instead the
+       * output will only include the value of the key field, in a field named
+       * `key` in the output:
+       *  * For `jsonl` output format, the output will have a `key` field
+       *    instead of the `instance` field.
+       *  * For `csv`/`bigquery` output format, the output will have a `key`
+       *    column instead of the instance feature columns.
+       * The input must be JSONL with objects at each line, CSV, BigQuery
+       * or TfRecord.
+       * 
+ * + * string key_field = 2; + * + * @param value The keyField to set. + * @return This builder for chaining. + */ + public Builder setKeyField(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + keyField_ = value; + onChanged(); + return this; + } + /** + * + * + *
+       * The name of the field that is considered as a key.
+       * The values identified by the key field are not included in the transformed
+       * instances that are sent to the Model. This is similar to
+       * specifying this name of the field in [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields]. In addition,
+       * the batch prediction output will not include the instances. Instead the
+       * output will only include the value of the key field, in a field named
+       * `key` in the output:
+       *  * For `jsonl` output format, the output will have a `key` field
+       *    instead of the `instance` field.
+       *  * For `csv`/`bigquery` output format, the output will have a `key`
+       *    column instead of the instance feature columns.
+       * The input must be JSONL with objects at each line, CSV, BigQuery
+       * or TfRecord.
+       * 
+ * + * string key_field = 2; + * + * @return This builder for chaining. + */ + public Builder clearKeyField() { + + keyField_ = getDefaultInstance().getKeyField(); + onChanged(); + return this; + } + /** + * + * + *
+       * The name of the field that is considered as a key.
+       * The values identified by the key field are not included in the transformed
+       * instances that are sent to the Model. This is similar to
+       * specifying this name of the field in [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields]. In addition,
+       * the batch prediction output will not include the instances. Instead the
+       * output will only include the value of the key field, in a field named
+       * `key` in the output:
+       *  * For `jsonl` output format, the output will have a `key` field
+       *    instead of the `instance` field.
+       *  * For `csv`/`bigquery` output format, the output will have a `key`
+       *    column instead of the instance feature columns.
+       * The input must be JSONL with objects at each line, CSV, BigQuery
+       * or TfRecord.
+       * 
+ * + * string key_field = 2; + * + * @param value The bytes for keyField to set. + * @return This builder for chaining. + */ + public Builder setKeyFieldBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + keyField_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.LazyStringList includedFields_ = + com.google.protobuf.LazyStringArrayList.EMPTY; + + private void ensureIncludedFieldsIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + includedFields_ = new com.google.protobuf.LazyStringArrayList(includedFields_); + bitField0_ |= 0x00000001; + } + } + /** + * + * + *
+       * Fields that will be included in the prediction instance that is
+       * sent to the Model.
+       * If [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] is `array`, the order of field names in
+       * included_fields also determines the order of the values in the array.
+       * When included_fields is populated, [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] must be empty.
+       * The input must be JSONL with objects at each line, CSV, BigQuery
+       * or TfRecord.
+       * 
+ * + * repeated string included_fields = 3; + * + * @return A list containing the includedFields. + */ + public com.google.protobuf.ProtocolStringList getIncludedFieldsList() { + return includedFields_.getUnmodifiableView(); + } + /** + * + * + *
+       * Fields that will be included in the prediction instance that is
+       * sent to the Model.
+       * If [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] is `array`, the order of field names in
+       * included_fields also determines the order of the values in the array.
+       * When included_fields is populated, [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] must be empty.
+       * The input must be JSONL with objects at each line, CSV, BigQuery
+       * or TfRecord.
+       * 
+ * + * repeated string included_fields = 3; + * + * @return The count of includedFields. + */ + public int getIncludedFieldsCount() { + return includedFields_.size(); + } + /** + * + * + *
+       * Fields that will be included in the prediction instance that is
+       * sent to the Model.
+       * If [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] is `array`, the order of field names in
+       * included_fields also determines the order of the values in the array.
+       * When included_fields is populated, [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] must be empty.
+       * The input must be JSONL with objects at each line, CSV, BigQuery
+       * or TfRecord.
+       * 
+ * + * repeated string included_fields = 3; + * + * @param index The index of the element to return. + * @return The includedFields at the given index. + */ + public java.lang.String getIncludedFields(int index) { + return includedFields_.get(index); + } + /** + * + * + *
+       * Fields that will be included in the prediction instance that is
+       * sent to the Model.
+       * If [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] is `array`, the order of field names in
+       * included_fields also determines the order of the values in the array.
+       * When included_fields is populated, [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] must be empty.
+       * The input must be JSONL with objects at each line, CSV, BigQuery
+       * or TfRecord.
+       * 
+ * + * repeated string included_fields = 3; + * + * @param index The index of the value to return. + * @return The bytes of the includedFields at the given index. + */ + public com.google.protobuf.ByteString getIncludedFieldsBytes(int index) { + return includedFields_.getByteString(index); + } + /** + * + * + *
+       * Fields that will be included in the prediction instance that is
+       * sent to the Model.
+       * If [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] is `array`, the order of field names in
+       * included_fields also determines the order of the values in the array.
+       * When included_fields is populated, [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] must be empty.
+       * The input must be JSONL with objects at each line, CSV, BigQuery
+       * or TfRecord.
+       * 
+ * + * repeated string included_fields = 3; + * + * @param index The index to set the value at. + * @param value The includedFields to set. + * @return This builder for chaining. + */ + public Builder setIncludedFields(int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureIncludedFieldsIsMutable(); + includedFields_.set(index, value); + onChanged(); + return this; + } + /** + * + * + *
+       * Fields that will be included in the prediction instance that is
+       * sent to the Model.
+       * If [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] is `array`, the order of field names in
+       * included_fields also determines the order of the values in the array.
+       * When included_fields is populated, [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] must be empty.
+       * The input must be JSONL with objects at each line, CSV, BigQuery
+       * or TfRecord.
+       * 
+ * + * repeated string included_fields = 3; + * + * @param value The includedFields to add. + * @return This builder for chaining. + */ + public Builder addIncludedFields(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureIncludedFieldsIsMutable(); + includedFields_.add(value); + onChanged(); + return this; + } + /** + * + * + *
+       * Fields that will be included in the prediction instance that is
+       * sent to the Model.
+       * If [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] is `array`, the order of field names in
+       * included_fields also determines the order of the values in the array.
+       * When included_fields is populated, [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] must be empty.
+       * The input must be JSONL with objects at each line, CSV, BigQuery
+       * or TfRecord.
+       * 
+ * + * repeated string included_fields = 3; + * + * @param values The includedFields to add. + * @return This builder for chaining. + */ + public Builder addAllIncludedFields(java.lang.Iterable values) { + ensureIncludedFieldsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll(values, includedFields_); + onChanged(); + return this; + } + /** + * + * + *
+       * Fields that will be included in the prediction instance that is
+       * sent to the Model.
+       * If [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] is `array`, the order of field names in
+       * included_fields also determines the order of the values in the array.
+       * When included_fields is populated, [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] must be empty.
+       * The input must be JSONL with objects at each line, CSV, BigQuery
+       * or TfRecord.
+       * 
+ * + * repeated string included_fields = 3; + * + * @return This builder for chaining. + */ + public Builder clearIncludedFields() { + includedFields_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * + * + *
+       * Fields that will be included in the prediction instance that is
+       * sent to the Model.
+       * If [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] is `array`, the order of field names in
+       * included_fields also determines the order of the values in the array.
+       * When included_fields is populated, [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] must be empty.
+       * The input must be JSONL with objects at each line, CSV, BigQuery
+       * or TfRecord.
+       * 
+ * + * repeated string included_fields = 3; + * + * @param value The bytes of the includedFields to add. + * @return This builder for chaining. + */ + public Builder addIncludedFieldsBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + ensureIncludedFieldsIsMutable(); + includedFields_.add(value); + onChanged(); + return this; + } + + private com.google.protobuf.LazyStringList excludedFields_ = + com.google.protobuf.LazyStringArrayList.EMPTY; + + private void ensureExcludedFieldsIsMutable() { + if (!((bitField0_ & 0x00000002) != 0)) { + excludedFields_ = new com.google.protobuf.LazyStringArrayList(excludedFields_); + bitField0_ |= 0x00000002; + } + } + /** + * + * + *
+       * Fields that will be excluded in the prediction instance that is
+       * sent to the Model.
+       * Excluded fields will be attached to the batch prediction output if [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field]
+       * is not specified.
+       * When excluded_fields is populated, [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be empty.
+       * The input must be JSONL with objects at each line, CSV, BigQuery
+       * or TfRecord.
+       * 
+ * + * repeated string excluded_fields = 4; + * + * @return A list containing the excludedFields. + */ + public com.google.protobuf.ProtocolStringList getExcludedFieldsList() { + return excludedFields_.getUnmodifiableView(); + } + /** + * + * + *
+       * Fields that will be excluded in the prediction instance that is
+       * sent to the Model.
+       * Excluded fields will be attached to the batch prediction output if [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field]
+       * is not specified.
+       * When excluded_fields is populated, [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be empty.
+       * The input must be JSONL with objects at each line, CSV, BigQuery
+       * or TfRecord.
+       * 
+ * + * repeated string excluded_fields = 4; + * + * @return The count of excludedFields. + */ + public int getExcludedFieldsCount() { + return excludedFields_.size(); + } + /** + * + * + *
+       * Fields that will be excluded in the prediction instance that is
+       * sent to the Model.
+       * Excluded fields will be attached to the batch prediction output if [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field]
+       * is not specified.
+       * When excluded_fields is populated, [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be empty.
+       * The input must be JSONL with objects at each line, CSV, BigQuery
+       * or TfRecord.
+       * 
+ * + * repeated string excluded_fields = 4; + * + * @param index The index of the element to return. + * @return The excludedFields at the given index. + */ + public java.lang.String getExcludedFields(int index) { + return excludedFields_.get(index); + } + /** + * + * + *
+       * Fields that will be excluded in the prediction instance that is
+       * sent to the Model.
+       * Excluded fields will be attached to the batch prediction output if [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field]
+       * is not specified.
+       * When excluded_fields is populated, [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be empty.
+       * The input must be JSONL with objects at each line, CSV, BigQuery
+       * or TfRecord.
+       * 
+ * + * repeated string excluded_fields = 4; + * + * @param index The index of the value to return. + * @return The bytes of the excludedFields at the given index. + */ + public com.google.protobuf.ByteString getExcludedFieldsBytes(int index) { + return excludedFields_.getByteString(index); + } + /** + * + * + *
+       * Fields that will be excluded in the prediction instance that is
+       * sent to the Model.
+       * Excluded fields will be attached to the batch prediction output if [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field]
+       * is not specified.
+       * When excluded_fields is populated, [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be empty.
+       * The input must be JSONL with objects at each line, CSV, BigQuery
+       * or TfRecord.
+       * 
+ * + * repeated string excluded_fields = 4; + * + * @param index The index to set the value at. + * @param value The excludedFields to set. + * @return This builder for chaining. + */ + public Builder setExcludedFields(int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureExcludedFieldsIsMutable(); + excludedFields_.set(index, value); + onChanged(); + return this; + } + /** + * + * + *
+       * Fields that will be excluded in the prediction instance that is
+       * sent to the Model.
+       * Excluded fields will be attached to the batch prediction output if [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field]
+       * is not specified.
+       * When excluded_fields is populated, [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be empty.
+       * The input must be JSONL with objects at each line, CSV, BigQuery
+       * or TfRecord.
+       * 
+ * + * repeated string excluded_fields = 4; + * + * @param value The excludedFields to add. + * @return This builder for chaining. + */ + public Builder addExcludedFields(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureExcludedFieldsIsMutable(); + excludedFields_.add(value); + onChanged(); + return this; + } + /** + * + * + *
+       * Fields that will be excluded in the prediction instance that is
+       * sent to the Model.
+       * Excluded fields will be attached to the batch prediction output if [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field]
+       * is not specified.
+       * When excluded_fields is populated, [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be empty.
+       * The input must be JSONL with objects at each line, CSV, BigQuery
+       * or TfRecord.
+       * 
+ * + * repeated string excluded_fields = 4; + * + * @param values The excludedFields to add. + * @return This builder for chaining. + */ + public Builder addAllExcludedFields(java.lang.Iterable values) { + ensureExcludedFieldsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll(values, excludedFields_); + onChanged(); + return this; + } + /** + * + * + *
+       * Fields that will be excluded in the prediction instance that is
+       * sent to the Model.
+       * Excluded fields will be attached to the batch prediction output if [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field]
+       * is not specified.
+       * When excluded_fields is populated, [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be empty.
+       * The input must be JSONL with objects at each line, CSV, BigQuery
+       * or TfRecord.
+       * 
+ * + * repeated string excluded_fields = 4; + * + * @return This builder for chaining. + */ + public Builder clearExcludedFields() { + excludedFields_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + * + * + *
+       * Fields that will be excluded in the prediction instance that is
+       * sent to the Model.
+       * Excluded fields will be attached to the batch prediction output if [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field]
+       * is not specified.
+       * When excluded_fields is populated, [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be empty.
+       * The input must be JSONL with objects at each line, CSV, BigQuery
+       * or TfRecord.
+       * 
+ * + * repeated string excluded_fields = 4; + * + * @param value The bytes of the excludedFields to add. + * @return This builder for chaining. + */ + public Builder addExcludedFieldsBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + ensureExcludedFieldsIsMutable(); + excludedFields_.add(value); + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig) + } + + // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig) + private static final com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = + new com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig(); + } + + public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public InstanceConfig parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException 
e) { + throw e.asInvalidProtocolBufferException() + .setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + } + public interface OutputConfigOrBuilder extends // @@protoc_insertion_point(interface_extends:google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig) @@ -5199,6 +7287,61 @@ public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig getInp return getInputConfig(); } + public static final int INSTANCE_CONFIG_FIELD_NUMBER = 27; + private com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instanceConfig_; + /** + * + * + *
+   * Configuration for how to convert batch prediction input instances to the
+   * prediction instances that are sent to the Model.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instance_config = 27; + * + * + * @return Whether the instanceConfig field is set. + */ + @java.lang.Override + public boolean hasInstanceConfig() { + return instanceConfig_ != null; + } + /** + * + * + *
+   * Configuration for how to convert batch prediction input instances to the
+   * prediction instances that are sent to the Model.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instance_config = 27; + * + * + * @return The instanceConfig. + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig getInstanceConfig() { + return instanceConfig_ == null + ? com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.getDefaultInstance() + : instanceConfig_; + } + /** + * + * + *
+   * Configuration for how to convert batch prediction input instances to the
+   * prediction instances that are sent to the Model.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instance_config = 27; + * + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfigOrBuilder + getInstanceConfigOrBuilder() { + return getInstanceConfig(); + } + public static final int MODEL_PARAMETERS_FIELD_NUMBER = 5; private com.google.protobuf.Value modelParameters_; /** @@ -6616,6 +8759,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io if (modelMonitoringConfig_ != null) { output.writeMessage(26, getModelMonitoringConfig()); } + if (instanceConfig_ != null) { + output.writeMessage(27, getInstanceConfig()); + } if (unmanagedContainerModel_ != null) { output.writeMessage(28, getUnmanagedContainerModel()); } @@ -6719,6 +8865,9 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream.computeMessageSize(26, getModelMonitoringConfig()); } + if (instanceConfig_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(27, getInstanceConfig()); + } if (unmanagedContainerModel_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( @@ -6767,6 +8916,10 @@ public boolean equals(final java.lang.Object obj) { if (hasInputConfig()) { if (!getInputConfig().equals(other.getInputConfig())) return false; } + if (hasInstanceConfig() != other.hasInstanceConfig()) return false; + if (hasInstanceConfig()) { + if (!getInstanceConfig().equals(other.getInstanceConfig())) return false; + } if (hasModelParameters() != other.hasModelParameters()) return false; if (hasModelParameters()) { if (!getModelParameters().equals(other.getModelParameters())) return false; @@ -6866,6 +9019,10 @@ public int hashCode() { hash = (37 * hash) + INPUT_CONFIG_FIELD_NUMBER; hash = (53 * hash) + getInputConfig().hashCode(); } + if (hasInstanceConfig()) { + hash = (37 * hash) + INSTANCE_CONFIG_FIELD_NUMBER; + hash = (53 * hash) + getInstanceConfig().hashCode(); + } if 
(hasModelParameters()) { hash = (37 * hash) + MODEL_PARAMETERS_FIELD_NUMBER; hash = (53 * hash) + getModelParameters().hashCode(); @@ -7130,6 +9287,12 @@ public Builder clear() { inputConfig_ = null; inputConfigBuilder_ = null; } + if (instanceConfigBuilder_ == null) { + instanceConfig_ = null; + } else { + instanceConfig_ = null; + instanceConfigBuilder_ = null; + } if (modelParametersBuilder_ == null) { modelParameters_ = null; } else { @@ -7289,6 +9452,11 @@ public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob buildPartial() { } else { result.inputConfig_ = inputConfigBuilder_.build(); } + if (instanceConfigBuilder_ == null) { + result.instanceConfig_ = instanceConfig_; + } else { + result.instanceConfig_ = instanceConfigBuilder_.build(); + } if (modelParametersBuilder_ == null) { result.modelParameters_ = modelParameters_; } else { @@ -7465,6 +9633,9 @@ public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.BatchPredictionJob if (other.hasInputConfig()) { mergeInputConfig(other.getInputConfig()); } + if (other.hasInstanceConfig()) { + mergeInstanceConfig(other.getInstanceConfig()); + } if (other.hasModelParameters()) { mergeModelParameters(other.getModelParameters()); } @@ -7758,6 +9929,12 @@ public Builder mergeFrom( break; } // case 210 + case 218: + { + input.readMessage(getInstanceConfigFieldBuilder().getBuilder(), extensionRegistry); + + break; + } // case 218 case 226: { input.readMessage( @@ -8747,6 +10924,228 @@ public Builder clearInputConfig() { return inputConfigBuilder_; } + private com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instanceConfig_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig, + com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.Builder, + com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfigOrBuilder> + instanceConfigBuilder_; + /** + * + * + *
+     * Configuration for how to convert batch prediction input instances to the
+     * prediction instances that are sent to the Model.
+     * 
+ * + * + * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instance_config = 27; + * + * + * @return Whether the instanceConfig field is set. + */ + public boolean hasInstanceConfig() { + return instanceConfigBuilder_ != null || instanceConfig_ != null; + } + /** + * + * + *
+     * Configuration for how to convert batch prediction input instances to the
+     * prediction instances that are sent to the Model.
+     * 
+ * + * + * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instance_config = 27; + * + * + * @return The instanceConfig. + */ + public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig + getInstanceConfig() { + if (instanceConfigBuilder_ == null) { + return instanceConfig_ == null + ? com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig + .getDefaultInstance() + : instanceConfig_; + } else { + return instanceConfigBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Configuration for how to convert batch prediction input instances to the
+     * prediction instances that are sent to the Model.
+     * 
+ * + * + * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instance_config = 27; + * + */ + public Builder setInstanceConfig( + com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig value) { + if (instanceConfigBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + instanceConfig_ = value; + onChanged(); + } else { + instanceConfigBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Configuration for how to convert batch prediction input instances to the
+     * prediction instances that are sent to the Model.
+     * 
+ * + * + * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instance_config = 27; + * + */ + public Builder setInstanceConfig( + com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.Builder + builderForValue) { + if (instanceConfigBuilder_ == null) { + instanceConfig_ = builderForValue.build(); + onChanged(); + } else { + instanceConfigBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Configuration for how to convert batch prediction input instances to the
+     * prediction instances that are sent to the Model.
+     * 
+ * + * + * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instance_config = 27; + * + */ + public Builder mergeInstanceConfig( + com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig value) { + if (instanceConfigBuilder_ == null) { + if (instanceConfig_ != null) { + instanceConfig_ = + com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.newBuilder( + instanceConfig_) + .mergeFrom(value) + .buildPartial(); + } else { + instanceConfig_ = value; + } + onChanged(); + } else { + instanceConfigBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Configuration for how to convert batch prediction input instances to the
+     * prediction instances that are sent to the Model.
+     * 
+ * + * + * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instance_config = 27; + * + */ + public Builder clearInstanceConfig() { + if (instanceConfigBuilder_ == null) { + instanceConfig_ = null; + onChanged(); + } else { + instanceConfig_ = null; + instanceConfigBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Configuration for how to convert batch prediction input instances to the
+     * prediction instances that are sent to the Model.
+     * 
+ * + * + * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instance_config = 27; + * + */ + public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.Builder + getInstanceConfigBuilder() { + + onChanged(); + return getInstanceConfigFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Configuration for how to convert batch prediction input instances to the
+     * prediction instances that are sent to the Model.
+     * 
+ * + * + * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instance_config = 27; + * + */ + public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfigOrBuilder + getInstanceConfigOrBuilder() { + if (instanceConfigBuilder_ != null) { + return instanceConfigBuilder_.getMessageOrBuilder(); + } else { + return instanceConfig_ == null + ? com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig + .getDefaultInstance() + : instanceConfig_; + } + } + /** + * + * + *
+     * Configuration for how to convert batch prediction input instances to the
+     * prediction instances that are sent to the Model.
+     * 
+ * + * + * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instance_config = 27; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig, + com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.Builder, + com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfigOrBuilder> + getInstanceConfigFieldBuilder() { + if (instanceConfigBuilder_ == null) { + instanceConfigBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig, + com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.Builder, + com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfigOrBuilder>( + getInstanceConfig(), getParentForChildren(), isClean()); + instanceConfig_ = null; + } + return instanceConfigBuilder_; + } + private com.google.protobuf.Value modelParameters_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Value, diff --git a/java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJobOrBuilder.java b/java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJobOrBuilder.java index ddc8f4c575c1..b44967c7b38d 100644 --- a/java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJobOrBuilder.java +++ b/java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJobOrBuilder.java @@ -234,6 +234,48 @@ public interface BatchPredictionJobOrBuilder com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfigOrBuilder getInputConfigOrBuilder(); + /** + * + * + *
+   * Configuration for how to convert batch prediction input instances to the
+   * prediction instances that are sent to the Model.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instance_config = 27; + * + * + * @return Whether the instanceConfig field is set. + */ + boolean hasInstanceConfig(); + /** + * + * + *
+   * Configuration for how to convert batch prediction input instances to the
+   * prediction instances that are sent to the Model.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instance_config = 27; + * + * + * @return The instanceConfig. + */ + com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig getInstanceConfig(); + /** + * + * + *
+   * Configuration for how to convert batch prediction input instances to the
+   * prediction instances that are sent to the Model.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instance_config = 27; + * + */ + com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfigOrBuilder + getInstanceConfigOrBuilder(); + /** * * diff --git a/java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJobProto.java b/java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJobProto.java index 7f78cef8e1e7..ac3e6ea05601 100644 --- a/java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJobProto.java +++ b/java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJobProto.java @@ -35,6 +35,10 @@ public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry r internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_InputConfig_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_InputConfig_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_InstanceConfig_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_InstanceConfig_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_OutputConfig_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable @@ -76,7 +80,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { + "tform/v1beta1/unmanaged_container_model." 
+ "proto\032\034google/protobuf/struct.proto\032\037goo" + "gle/protobuf/timestamp.proto\032\027google/rpc" - + "/status.proto\"\237\024\n\022BatchPredictionJob\022\021\n\004" + + "/status.proto\"\352\025\n\022BatchPredictionJob\022\021\n\004" + "name\030\001 \001(\tB\003\340A\003\022\031\n\014display_name\030\002 \001(\tB\003\340" + "A\002\0223\n\005model\030\003 \001(\tB$\372A!\n\037aiplatform.googl" + "eapis.com/Model\022\035\n\020model_version_id\030\036 \001(" @@ -84,70 +88,75 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { + "\01328.google.cloud.aiplatform.v1beta1.Unma" + "nagedContainerModel\022Z\n\014input_config\030\004 \001(" + "\0132?.google.cloud.aiplatform.v1beta1.Batc" - + "hPredictionJob.InputConfigB\003\340A\002\0220\n\020model" - + "_parameters\030\005 \001(\0132\026.google.protobuf.Valu" - + "e\022\\\n\routput_config\030\006 \001(\0132@.google.cloud." - + "aiplatform.v1beta1.BatchPredictionJob.Ou" - + "tputConfigB\003\340A\002\022U\n\023dedicated_resources\030\007" - + " \001(\01328.google.cloud.aiplatform.v1beta1.B" - + "atchDedicatedResources\022\027\n\017service_accoun" - + "t\030\035 \001(\t\022i\n\036manual_batch_tuning_parameter" - + "s\030\010 \001(\0132<.google.cloud.aiplatform.v1beta" - + "1.ManualBatchTuningParametersB\003\340A\005\022\034\n\024ge" - + "nerate_explanation\030\027 \001(\010\022J\n\020explanation_" - + "spec\030\031 \001(\01320.google.cloud.aiplatform.v1b" - + "eta1.ExplanationSpec\022X\n\013output_info\030\t \001(" - + "\0132>.google.cloud.aiplatform.v1beta1.Batc" - + "hPredictionJob.OutputInfoB\003\340A\003\022=\n\005state\030" - + "\n \001(\0162).google.cloud.aiplatform.v1beta1." 
- + "JobStateB\003\340A\003\022&\n\005error\030\013 \001(\0132\022.google.rp" - + "c.StatusB\003\340A\003\0221\n\020partial_failures\030\014 \003(\0132" - + "\022.google.rpc.StatusB\003\340A\003\022S\n\022resources_co" - + "nsumed\030\r \001(\01322.google.cloud.aiplatform.v" - + "1beta1.ResourcesConsumedB\003\340A\003\022O\n\020complet" - + "ion_stats\030\016 \001(\01320.google.cloud.aiplatfor" - + "m.v1beta1.CompletionStatsB\003\340A\003\0224\n\013create" - + "_time\030\017 \001(\0132\032.google.protobuf.TimestampB" - + "\003\340A\003\0223\n\nstart_time\030\020 \001(\0132\032.google.protob" - + "uf.TimestampB\003\340A\003\0221\n\010end_time\030\021 \001(\0132\032.go" - + "ogle.protobuf.TimestampB\003\340A\003\0224\n\013update_t" - + "ime\030\022 \001(\0132\032.google.protobuf.TimestampB\003\340" - + "A\003\022O\n\006labels\030\023 \003(\0132?.google.cloud.aiplat" - + "form.v1beta1.BatchPredictionJob.LabelsEn" - + "try\022H\n\017encryption_spec\030\030 \001(\0132/.google.cl" - + "oud.aiplatform.v1beta1.EncryptionSpec\022W\n" - + "\027model_monitoring_config\030\032 \001(\01326.google." - + "cloud.aiplatform.v1beta1.ModelMonitoring" - + "Config\022h\n model_monitoring_stats_anomali" - + "es\030\037 \003(\0132>.google.cloud.aiplatform.v1bet" - + "a1.ModelMonitoringStatsAnomalies\0228\n\027mode" - + "l_monitoring_status\030 \001(\0132\022.google.rpc.S" - + "tatusB\003\340A\003\032\304\001\n\013InputConfig\022@\n\ngcs_source" - + "\030\002 \001(\0132*.google.cloud.aiplatform.v1beta1" - + ".GcsSourceH\000\022J\n\017bigquery_source\030\003 \001(\0132/." 
- + "google.cloud.aiplatform.v1beta1.BigQuery" - + "SourceH\000\022\035\n\020instances_format\030\001 \001(\tB\003\340A\002B" - + "\010\n\006source\032\340\001\n\014OutputConfig\022J\n\017gcs_destin" - + "ation\030\002 \001(\0132/.google.cloud.aiplatform.v1" - + "beta1.GcsDestinationH\000\022T\n\024bigquery_desti" - + "nation\030\003 \001(\01324.google.cloud.aiplatform.v" - + "1beta1.BigQueryDestinationH\000\022\037\n\022predicti" - + "ons_format\030\001 \001(\tB\003\340A\002B\r\n\013destination\032\220\001\n" - + "\nOutputInfo\022#\n\024gcs_output_directory\030\001 \001(" - + "\tB\003\340A\003H\000\022&\n\027bigquery_output_dataset\030\002 \001(" - + "\tB\003\340A\003H\000\022\"\n\025bigquery_output_table\030\004 \001(\tB" - + "\003\340A\003B\021\n\017output_location\032-\n\013LabelsEntry\022\013" - + "\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001:\206\001\352A\202\001\n,a" - + "iplatform.googleapis.com/BatchPrediction" - + "Job\022Rprojects/{project}/locations/{locat" - + "ion}/batchPredictionJobs/{batch_predicti" - + "on_job}B\364\001\n#com.google.cloud.aiplatform." - + "v1beta1B\027BatchPredictionJobProtoP\001ZIgoog" - + "le.golang.org/genproto/googleapis/cloud/" - + "aiplatform/v1beta1;aiplatform\252\002\037Google.C" - + "loud.AIPlatform.V1Beta1\312\002\037Google\\Cloud\\A" - + "IPlatform\\V1beta1\352\002\"Google::Cloud::AIPla" - + "tform::V1beta1b\006proto3" + + "hPredictionJob.InputConfigB\003\340A\002\022[\n\017insta" + + "nce_config\030\033 \001(\0132B.google.cloud.aiplatfo" + + "rm.v1beta1.BatchPredictionJob.InstanceCo" + + "nfig\0220\n\020model_parameters\030\005 \001(\0132\026.google." + + "protobuf.Value\022\\\n\routput_config\030\006 \001(\0132@." 
+ + "google.cloud.aiplatform.v1beta1.BatchPre" + + "dictionJob.OutputConfigB\003\340A\002\022U\n\023dedicate" + + "d_resources\030\007 \001(\01328.google.cloud.aiplatf" + + "orm.v1beta1.BatchDedicatedResources\022\027\n\017s" + + "ervice_account\030\035 \001(\t\022i\n\036manual_batch_tun" + + "ing_parameters\030\010 \001(\0132<.google.cloud.aipl" + + "atform.v1beta1.ManualBatchTuningParamete" + + "rsB\003\340A\005\022\034\n\024generate_explanation\030\027 \001(\010\022J\n" + + "\020explanation_spec\030\031 \001(\01320.google.cloud.a" + + "iplatform.v1beta1.ExplanationSpec\022X\n\013out" + + "put_info\030\t \001(\0132>.google.cloud.aiplatform" + + ".v1beta1.BatchPredictionJob.OutputInfoB\003" + + "\340A\003\022=\n\005state\030\n \001(\0162).google.cloud.aiplat" + + "form.v1beta1.JobStateB\003\340A\003\022&\n\005error\030\013 \001(" + + "\0132\022.google.rpc.StatusB\003\340A\003\0221\n\020partial_fa" + + "ilures\030\014 \003(\0132\022.google.rpc.StatusB\003\340A\003\022S\n" + + "\022resources_consumed\030\r \001(\01322.google.cloud" + + ".aiplatform.v1beta1.ResourcesConsumedB\003\340" + + "A\003\022O\n\020completion_stats\030\016 \001(\01320.google.cl" + + "oud.aiplatform.v1beta1.CompletionStatsB\003" + + "\340A\003\0224\n\013create_time\030\017 \001(\0132\032.google.protob" + + "uf.TimestampB\003\340A\003\0223\n\nstart_time\030\020 \001(\0132\032." 
+ + "google.protobuf.TimestampB\003\340A\003\0221\n\010end_ti" + + "me\030\021 \001(\0132\032.google.protobuf.TimestampB\003\340A" + + "\003\0224\n\013update_time\030\022 \001(\0132\032.google.protobuf" + + ".TimestampB\003\340A\003\022O\n\006labels\030\023 \003(\0132?.google" + + ".cloud.aiplatform.v1beta1.BatchPredictio" + + "nJob.LabelsEntry\022H\n\017encryption_spec\030\030 \001(" + + "\0132/.google.cloud.aiplatform.v1beta1.Encr" + + "yptionSpec\022W\n\027model_monitoring_config\030\032 " + + "\001(\01326.google.cloud.aiplatform.v1beta1.Mo" + + "delMonitoringConfig\022h\n model_monitoring_" + + "stats_anomalies\030\037 \003(\0132>.google.cloud.aip" + + "latform.v1beta1.ModelMonitoringStatsAnom" + + "alies\0228\n\027model_monitoring_status\030 \001(\0132\022" + + ".google.rpc.StatusB\003\340A\003\032\304\001\n\013InputConfig\022" + + "@\n\ngcs_source\030\002 \001(\0132*.google.cloud.aipla" + + "tform.v1beta1.GcsSourceH\000\022J\n\017bigquery_so" + + "urce\030\003 \001(\0132/.google.cloud.aiplatform.v1b" + + "eta1.BigQuerySourceH\000\022\035\n\020instances_forma" + + "t\030\001 \001(\tB\003\340A\002B\010\n\006source\032l\n\016InstanceConfig" + + "\022\025\n\rinstance_type\030\001 \001(\t\022\021\n\tkey_field\030\002 \001" + + "(\t\022\027\n\017included_fields\030\003 \003(\t\022\027\n\017excluded_" + + "fields\030\004 \003(\t\032\340\001\n\014OutputConfig\022J\n\017gcs_des" + + "tination\030\002 \001(\0132/.google.cloud.aiplatform" + + ".v1beta1.GcsDestinationH\000\022T\n\024bigquery_de" + + "stination\030\003 \001(\01324.google.cloud.aiplatfor" + + "m.v1beta1.BigQueryDestinationH\000\022\037\n\022predi" + + "ctions_format\030\001 \001(\tB\003\340A\002B\r\n\013destination\032" + + "\220\001\n\nOutputInfo\022#\n\024gcs_output_directory\030\001" + + " \001(\tB\003\340A\003H\000\022&\n\027bigquery_output_dataset\030\002" + + " \001(\tB\003\340A\003H\000\022\"\n\025bigquery_output_table\030\004 \001" + + 
"(\tB\003\340A\003B\021\n\017output_location\032-\n\013LabelsEntr" + + "y\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001:\206\001\352A\202\001" + + "\n,aiplatform.googleapis.com/BatchPredict" + + "ionJob\022Rprojects/{project}/locations/{lo" + + "cation}/batchPredictionJobs/{batch_predi" + + "ction_job}B\364\001\n#com.google.cloud.aiplatfo" + + "rm.v1beta1B\027BatchPredictionJobProtoP\001ZIg" + + "oogle.golang.org/genproto/googleapis/clo" + + "ud/aiplatform/v1beta1;aiplatform\252\002\037Googl" + + "e.Cloud.AIPlatform.V1Beta1\312\002\037Google\\Clou" + + "d\\AIPlatform\\V1beta1\352\002\"Google::Cloud::AI" + + "Platform::V1beta1b\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( @@ -181,6 +190,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { "ModelVersionId", "UnmanagedContainerModel", "InputConfig", + "InstanceConfig", "ModelParameters", "OutputConfig", "DedicatedResources", @@ -214,10 +224,20 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { new java.lang.String[] { "GcsSource", "BigquerySource", "InstancesFormat", "Source", }); - internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_OutputConfig_descriptor = + internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_InstanceConfig_descriptor = internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_descriptor .getNestedTypes() .get(1); + internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_InstanceConfig_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_InstanceConfig_descriptor, + new java.lang.String[] { + "InstanceType", "KeyField", "IncludedFields", "ExcludedFields", + }); + internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_OutputConfig_descriptor = + 
internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_descriptor + .getNestedTypes() + .get(2); internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_OutputConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_OutputConfig_descriptor, @@ -227,7 +247,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_OutputInfo_descriptor = internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_descriptor .getNestedTypes() - .get(2); + .get(3); internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_OutputInfo_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_OutputInfo_descriptor, @@ -240,7 +260,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_LabelsEntry_descriptor = internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_descriptor .getNestedTypes() - .get(3); + .get(4); internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_LabelsEntry_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_LabelsEntry_descriptor, diff --git a/java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/batch_prediction_job.proto b/java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/batch_prediction_job.proto index 63fb0d9d0c4e..fbb4163496b9 100644 --- a/java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/batch_prediction_job.proto +++ 
b/java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/batch_prediction_job.proto @@ -73,6 +73,91 @@ message BatchPredictionJob { string instances_format = 1 [(google.api.field_behavior) = REQUIRED]; } + // Configuration defining how to transform batch prediction input instances to + // the instances that the Model accepts. + message InstanceConfig { + // The format of the instance that the Model accepts. Vertex AI will + // convert compatible + // [batch prediction input instance formats][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.instances_format] + // to the specified format. + // + // Supported values are: + // + // * `object`: Each input is converted to JSON object format. + // * For `bigquery`, each row is converted to an object. + // * For `jsonl`, each line of the JSONL input must be an object. + // * Does not apply to `csv`, `file-list`, `tf-record`, or + // `tf-record-gzip`. + // + // * `array`: Each input is converted to JSON array format. + // * For `bigquery`, each row is converted to an array. The order + // of columns is determined by the BigQuery column order, unless + // [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] is populated. + // [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be populated for specifying field orders. + // * For `jsonl`, if each line of the JSONL input is an object, + // [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be populated for specifying field orders. + // * Does not apply to `csv`, `file-list`, `tf-record`, or + // `tf-record-gzip`. + // + // If not specified, Vertex AI converts the batch prediction input as + // follows: + // + // * For `bigquery` and `csv`, the behavior is the same as `array`. 
The + // order of columns is the same as defined in the file or table, unless + // [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] is populated. + // * For `jsonl`, the prediction instance format is determined by + // each line of the input. + // * For `tf-record`/`tf-record-gzip`, each record will be converted to + // an object in the format of `{"b64": }`, where `` is + // the Base64-encoded string of the content of the record. + // * For `file-list`, each file in the list will be converted to an + // object in the format of `{"b64": }`, where `` is + // the Base64-encoded string of the content of the file. + string instance_type = 1; + + // The name of the field that is considered as a key. + // + // The values identified by the key field is not included in the transformed + // instances that is sent to the Model. This is similar to + // specifying this name of the field in [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields]. In addition, + // the batch prediction output will not include the instances. Instead the + // output will only include the value of the key field, in a field named + // `key` in the output: + // + // * For `jsonl` output format, the output will have a `key` field + // instead of the `instance` field. + // * For `csv`/`bigquery` output format, the output will have have a `key` + // column instead of the instance feature columns. + // + // The input must be JSONL with objects at each line, CSV, BigQuery + // or TfRecord. + string key_field = 2; + + // Fields that will be included in the prediction instance that is + // sent to the Model. + // + // If [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] is `array`, the order of field names in + // included_fields also determines the order of the values in the array. 
+ // + // When included_fields is populated, [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] must be empty. + // + // The input must be JSONL with objects at each line, CSV, BigQuery + // or TfRecord. + repeated string included_fields = 3; + + // Fields that will be excluded in the prediction instance that is + // sent to the Model. + // + // Excluded will be attached to the batch prediction output if [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field] + // is not specified. + // + // When excluded_fields is populated, [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] must be empty. + // + // The input must be JSONL with objects at each line, CSV, BigQuery + // or TfRecord. + repeated string excluded_fields = 4; + } + // Configures the output of [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. // See [Model.supported_output_storage_formats][google.cloud.aiplatform.v1beta1.Model.supported_output_storage_formats] for supported output // formats, and how predictions are expressed via any of them. @@ -183,6 +268,10 @@ message BatchPredictionJob { // [instance_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri]. InputConfig input_config = 4 [(google.api.field_behavior) = REQUIRED]; + // Configuration for how to convert batch prediction input instances to the + // prediction instances that are sent to the Model. + InstanceConfig instance_config = 27; + // The parameters that govern the predictions. The schema of the parameters // may be specified via the [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] // [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata]