Package org.apache.beam.sdk.io.kafka
Class KafkaWriteSchemaTransformProvider.KafkaWriteSchemaTransformConfiguration
- java.lang.Object
-
- org.apache.beam.sdk.io.kafka.KafkaWriteSchemaTransformProvider.KafkaWriteSchemaTransformConfiguration
-
- All Implemented Interfaces:
java.io.Serializable
- Enclosing class:
- KafkaWriteSchemaTransformProvider
@DefaultSchema(org.apache.beam.sdk.schemas.AutoValueSchema.class) public abstract static class KafkaWriteSchemaTransformProvider.KafkaWriteSchemaTransformConfiguration extends java.lang.Object implements java.io.Serializable
- See Also:
- Serialized Form
-
-
Nested Class Summary
Nested Classes Modifier and Type Class Description static class KafkaWriteSchemaTransformProvider.KafkaWriteSchemaTransformConfiguration.Builder
-
Constructor Summary
Constructors Constructor Description KafkaWriteSchemaTransformConfiguration()
-
Method Summary
All Methods Static Methods Instance Methods Abstract Methods Concrete Methods Modifier and Type Method Description
static KafkaWriteSchemaTransformProvider.KafkaWriteSchemaTransformConfiguration.Builder builder()
abstract java.lang.String getBootstrapServers()
abstract org.apache.beam.sdk.schemas.transforms.providers.ErrorHandling getErrorHandling()
abstract java.lang.String getFileDescriptorPath()
abstract java.lang.String getFormat()
abstract java.lang.String getMessageName()
abstract java.util.Map<java.lang.String,java.lang.String> getProducerConfigUpdates()
abstract java.lang.String getSchema()
abstract java.lang.String getTopic()
-
-
-
Method Detail
-
getFormat
@SchemaFieldDescription("The encoding format for the data stored in Kafka. Valid options are: RAW,JSON,AVRO,PROTO") public abstract java.lang.String getFormat()
-
getTopic
public abstract java.lang.String getTopic()
-
getBootstrapServers
@SchemaFieldDescription("A list of host/port pairs to use for establishing the initial connection to the Kafka cluster. The client will make use of all servers irrespective of which servers are specified here for bootstrapping—this list only impacts the initial hosts used to discover the full set of servers. | Format: host1:port1,host2:port2,...") public abstract java.lang.String getBootstrapServers()
-
getProducerConfigUpdates
@SchemaFieldDescription("A list of key-value pairs that act as configuration parameters for Kafka producers. Most of these configurations will not be needed, but if you need to customize your Kafka producer, you may use this. See a detailed list: https://docs.confluent.io/platform/current/installation/configuration/producer-configs.html") @Nullable public abstract java.util.Map<java.lang.String,java.lang.String> getProducerConfigUpdates()
-
getErrorHandling
@SchemaFieldDescription("This option specifies whether and where to output unwritable rows.") @Nullable public abstract org.apache.beam.sdk.schemas.transforms.providers.ErrorHandling getErrorHandling()
-
getFileDescriptorPath
@SchemaFieldDescription("The path to the Protocol Buffer File Descriptor Set file. This file is used for schema definition and message serialization.") @Nullable public abstract java.lang.String getFileDescriptorPath()
-
getMessageName
@SchemaFieldDescription("The name of the Protocol Buffer message to be used for schema extraction and data conversion.") @Nullable public abstract java.lang.String getMessageName()
-
getSchema
@Nullable public abstract java.lang.String getSchema()
-
builder
public static KafkaWriteSchemaTransformProvider.KafkaWriteSchemaTransformConfiguration.Builder builder()
-
-