public abstract class BeamKafkaTable extends BaseBeamTable
BeamKafkaTable represents a Kafka topic, as a source or a target. It needs to be extended to convert between BeamSqlRow and KV<byte[], byte[]>, using the table's schema.

| Modifier | Constructor and Description |
|---|---|
protected |
BeamKafkaTable(org.apache.beam.sdk.schemas.Schema beamSchema) |
|
BeamKafkaTable(org.apache.beam.sdk.schemas.Schema beamSchema,
java.util.List<org.apache.kafka.common.TopicPartition> topicPartitions,
java.lang.String bootstrapServers) |
|
BeamKafkaTable(org.apache.beam.sdk.schemas.Schema beamSchema,
java.lang.String bootstrapServers,
java.util.List<java.lang.String> topics) |
| Modifier and Type | Method and Description |
|---|---|
org.apache.beam.sdk.values.PCollection<org.apache.beam.sdk.values.Row> |
buildIOReader(org.apache.beam.sdk.values.PBegin begin)
create a
PCollection<Row> from source. |
org.apache.beam.sdk.values.POutput |
buildIOWriter(org.apache.beam.sdk.values.PCollection<org.apache.beam.sdk.values.Row> input)
create a
IO.write() instance to write to target. |
java.lang.String |
getBootstrapServers() |
abstract org.apache.beam.sdk.transforms.PTransform<org.apache.beam.sdk.values.PCollection<org.apache.beam.sdk.values.KV<byte[],byte[]>>,org.apache.beam.sdk.values.PCollection<org.apache.beam.sdk.values.Row>> |
getPTransformForInput() |
abstract org.apache.beam.sdk.transforms.PTransform<org.apache.beam.sdk.values.PCollection<org.apache.beam.sdk.values.Row>,org.apache.beam.sdk.values.PCollection<org.apache.beam.sdk.values.KV<byte[],byte[]>>> |
getPTransformForOutput() |
java.util.List<java.lang.String> |
getTopics() |
org.apache.beam.sdk.values.PCollection.IsBounded |
isBounded()
Whether this table is bounded (known to be finite) or unbounded (may or may not be finite).
|
BeamKafkaTable |
updateConsumerProperties(java.util.Map<java.lang.String,java.lang.Object> configUpdates) |
protected BeamKafkaTable(org.apache.beam.sdk.schemas.Schema beamSchema)
public BeamKafkaTable(org.apache.beam.sdk.schemas.Schema beamSchema,
java.lang.String bootstrapServers,
java.util.List<java.lang.String> topics)
public BeamKafkaTable(org.apache.beam.sdk.schemas.Schema beamSchema,
java.util.List<org.apache.kafka.common.TopicPartition> topicPartitions,
java.lang.String bootstrapServers)
public BeamKafkaTable updateConsumerProperties(java.util.Map<java.lang.String,java.lang.Object> configUpdates)
public org.apache.beam.sdk.values.PCollection.IsBounded isBounded()
public abstract org.apache.beam.sdk.transforms.PTransform<org.apache.beam.sdk.values.PCollection<org.apache.beam.sdk.values.KV<byte[],byte[]>>,org.apache.beam.sdk.values.PCollection<org.apache.beam.sdk.values.Row>> getPTransformForInput()
public abstract org.apache.beam.sdk.transforms.PTransform<org.apache.beam.sdk.values.PCollection<org.apache.beam.sdk.values.Row>,org.apache.beam.sdk.values.PCollection<org.apache.beam.sdk.values.KV<byte[],byte[]>>> getPTransformForOutput()
public org.apache.beam.sdk.values.PCollection<org.apache.beam.sdk.values.Row> buildIOReader(org.apache.beam.sdk.values.PBegin begin)
Creates a PCollection<Row> from the source.
public org.apache.beam.sdk.values.POutput buildIOWriter(org.apache.beam.sdk.values.PCollection<org.apache.beam.sdk.values.Row> input)
Creates an IO.write() instance to write to the target.
public java.lang.String getBootstrapServers()
public java.util.List<java.lang.String> getTopics()