Serialized Form
-
Package org.apache.beam.runners.spark.coders
-
Class org.apache.beam.runners.spark.coders.CoderHelpers.FromByteFunction extends java.lang.Object implements Serializable
-
-
Package org.apache.beam.runners.spark.examples
-
Class org.apache.beam.runners.spark.examples.WordCount.CountWords extends org.apache.beam.sdk.transforms.PTransform<org.apache.beam.sdk.values.PCollection<java.lang.String>,org.apache.beam.sdk.values.PCollection<org.apache.beam.sdk.values.KV<java.lang.String,java.lang.Long>>> implements Serializable
-
Class org.apache.beam.runners.spark.examples.WordCount.FormatAsTextFn extends org.apache.beam.sdk.transforms.SimpleFunction<org.apache.beam.sdk.values.KV<java.lang.String,java.lang.Long>,java.lang.String> implements Serializable
-
-
Package org.apache.beam.runners.spark.io
-
Class org.apache.beam.runners.spark.io.ConsoleIO.Write.Unbound extends org.apache.beam.sdk.transforms.PTransform<org.apache.beam.sdk.values.PCollection<T>,org.apache.beam.sdk.values.PDone> implements Serializable
-
Serialized Fields
-
num
int num
-
-
-
Class org.apache.beam.runners.spark.io.CreateStream extends org.apache.beam.sdk.transforms.PTransform<org.apache.beam.sdk.values.PBegin,org.apache.beam.sdk.values.PCollection<T>> implements Serializable
-
Serialized Fields
-
batchDuration
org.joda.time.Duration batchDuration
-
batches
java.util.Queue<java.lang.Iterable<org.apache.beam.sdk.values.TimestampedValue<T>>> batches
-
coder
org.apache.beam.sdk.coders.Coder<T> coder
-
forceWatermarkSync
boolean forceWatermarkSync
-
initialSystemTime
org.joda.time.Instant initialSystemTime
-
lowWatermark
org.joda.time.Instant lowWatermark
-
times
java.util.Deque<GlobalWatermarkHolder.SparkWatermarks> times
-
-
-
Class org.apache.beam.runners.spark.io.EmptyCheckpointMark extends java.lang.Object implements Serializable
-
Class org.apache.beam.runners.spark.io.MicrobatchSource extends org.apache.beam.sdk.io.Source<T> implements Serializable
-
Serialized Fields
-
maxNumRecords
long maxNumRecords
-
maxReadTime
org.joda.time.Duration maxReadTime
-
numInitialSplits
int numInitialSplits
-
readerCacheInterval
double readerCacheInterval
-
source
org.apache.beam.sdk.io.UnboundedSource<T,CheckpointMarkT extends org.apache.beam.sdk.io.UnboundedSource.CheckpointMark> source
-
sourceId
int sourceId
-
splitId
int splitId
-
-
-
Class org.apache.beam.runners.spark.io.SourceRDD.Bounded extends org.apache.spark.rdd.RDD<org.apache.beam.sdk.util.WindowedValue<T>> implements Serializable
-
Serialized Fields
-
bundleSize
long bundleSize
-
metricsAccum
MetricsContainerStepMapAccumulator metricsAccum
-
numPartitions
int numPartitions
-
options
org.apache.beam.runners.core.construction.SerializablePipelineOptions options
-
source
org.apache.beam.sdk.io.BoundedSource<T> source
-
stepName
java.lang.String stepName
-
-
-
Class org.apache.beam.runners.spark.io.SourceRDD.Unbounded extends org.apache.spark.rdd.RDD<scala.Tuple2<org.apache.beam.sdk.io.Source<T>,CheckpointMarkT extends org.apache.beam.sdk.io.UnboundedSource.CheckpointMark>> implements Serializable
-
Serialized Fields
-
microbatchSource
MicrobatchSource<T,CheckpointMarkT extends org.apache.beam.sdk.io.UnboundedSource.CheckpointMark> microbatchSource
-
options
org.apache.beam.runners.core.construction.SerializablePipelineOptions options
-
partitioner
org.apache.spark.Partitioner partitioner
-
-
-
Class org.apache.beam.runners.spark.io.SparkUnboundedSource.Metadata extends java.lang.Object implements Serializable
-
Serialized Fields
-
highWatermark
org.joda.time.Instant highWatermark
-
lowWatermark
org.joda.time.Instant lowWatermark
-
metricsContainers
org.apache.beam.runners.core.metrics.MetricsContainerStepMap metricsContainers
-
numRecords
long numRecords
-
readDurationMillis
long readDurationMillis
-
-
-
-
Package org.apache.beam.runners.spark.metrics
-
Class org.apache.beam.runners.spark.metrics.MetricsContainerStepMapAccumulator extends org.apache.spark.util.AccumulatorV2<org.apache.beam.runners.core.metrics.MetricsContainerStepMap,org.apache.beam.runners.core.metrics.MetricsContainerStepMap> implements Serializable
-
Serialized Fields
-
value
org.apache.beam.runners.core.metrics.MetricsContainerStepMap value
-
-
-
-
Package org.apache.beam.runners.spark.stateful
-
Class org.apache.beam.runners.spark.stateful.SparkGroupAlsoByWindowViaWindowSet extends java.lang.Object implements Serializable
-
Class org.apache.beam.runners.spark.stateful.SparkGroupAlsoByWindowViaWindowSet.StateAndTimers extends java.lang.Object implements Serializable
-
Serialized Fields
-
serTimers
java.util.Collection<byte[]> serTimers
-
state
org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.collect.Table<java.lang.String,java.lang.String,byte[]> state
-
-
-
-
Package org.apache.beam.runners.spark.structuredstreaming.examples
-
Class org.apache.beam.runners.spark.structuredstreaming.examples.WordCount.CountWords extends org.apache.beam.sdk.transforms.PTransform<org.apache.beam.sdk.values.PCollection<java.lang.String>,org.apache.beam.sdk.values.PCollection<org.apache.beam.sdk.values.KV<java.lang.String,java.lang.Long>>> implements Serializable
-
Class org.apache.beam.runners.spark.structuredstreaming.examples.WordCount.FormatAsTextFn extends org.apache.beam.sdk.transforms.SimpleFunction<org.apache.beam.sdk.values.KV<java.lang.String,java.lang.Long>,java.lang.String> implements Serializable
-
-
Package org.apache.beam.runners.spark.structuredstreaming.metrics
-
Class org.apache.beam.runners.spark.structuredstreaming.metrics.MetricsAccumulator extends org.apache.spark.util.AccumulatorV2<org.apache.beam.runners.core.metrics.MetricsContainerStepMap,org.apache.beam.runners.core.metrics.MetricsContainerStepMap> implements Serializable
-
Serialized Fields
-
value
org.apache.beam.runners.core.metrics.MetricsContainerStepMap value
-
-
-
-
Package org.apache.beam.runners.spark.structuredstreaming.translation.batch.functions
-
Class org.apache.beam.runners.spark.structuredstreaming.translation.batch.functions.GroupAlsoByWindowViaOutputBufferFn extends java.lang.Object implements Serializable
-
Serialized Fields
-
options
java.util.function.Supplier<org.apache.beam.sdk.options.PipelineOptions> options
-
reduceFn
org.apache.beam.runners.core.SystemReduceFn<K,InputT,java.lang.Iterable<InputT>,java.lang.Iterable<InputT>,W extends org.apache.beam.sdk.transforms.windowing.BoundedWindow> reduceFn
-
stateInternalsFactory
org.apache.beam.runners.core.StateInternalsFactory<K> stateInternalsFactory
-
windowingStrategy
org.apache.beam.sdk.values.WindowingStrategy<?,W extends org.apache.beam.sdk.transforms.windowing.BoundedWindow> windowingStrategy
-
-
-
Class org.apache.beam.runners.spark.structuredstreaming.translation.batch.functions.SideInputValues.BaseSideInputValues extends java.lang.Object implements Serializable
-
Serialized Fields
-
binaryValues
byte[][] binaryValues
-
coder
org.apache.beam.sdk.coders.Coder<BinaryT> coder
-
-
-
Class org.apache.beam.runners.spark.structuredstreaming.translation.batch.functions.SideInputValues.ByWindow extends SideInputValues.BaseSideInputValues<org.apache.beam.sdk.util.WindowedValue<T>,java.util.Map<org.apache.beam.sdk.transforms.windowing.BoundedWindow,java.util.List<T>>,T> implements Serializable
-
Class org.apache.beam.runners.spark.structuredstreaming.translation.batch.functions.SideInputValues.Global extends SideInputValues.BaseSideInputValues<T,java.util.List<T>,T> implements Serializable
-
Class org.apache.beam.runners.spark.structuredstreaming.translation.batch.functions.SparkSideInputReader extends java.lang.Object implements Serializable
-
Serialized Fields
-
sideInputs
java.util.Map<java.lang.String,org.apache.spark.broadcast.Broadcast<SideInputValues<?>>> sideInputs
-
-
-
-
Package org.apache.beam.runners.spark.structuredstreaming.translation.utils
-
Package org.apache.beam.runners.spark.translation
-
Class org.apache.beam.runners.spark.translation.BoundedDataset extends java.lang.Object implements Serializable
-
Class org.apache.beam.runners.spark.translation.MultiDoFnFunction extends java.lang.Object implements Serializable
-
Serialized Fields
-
additionalOutputTags
java.util.List<org.apache.beam.sdk.values.TupleTag<?>> additionalOutputTags
-
doFn
org.apache.beam.sdk.transforms.DoFn<InputT,OutputT> doFn
-
doFnSchemaInformation
org.apache.beam.sdk.transforms.DoFnSchemaInformation doFnSchemaInformation
-
inputCoder
org.apache.beam.sdk.coders.Coder<InputT> inputCoder
-
mainOutputTag
org.apache.beam.sdk.values.TupleTag<OutputT> mainOutputTag
-
metricsAccum
MetricsContainerStepMapAccumulator metricsAccum
-
options
org.apache.beam.runners.core.construction.SerializablePipelineOptions options
-
outputCoders
java.util.Map<org.apache.beam.sdk.values.TupleTag<?>,org.apache.beam.sdk.coders.Coder<?>> outputCoders
-
sideInputMapping
java.util.Map<java.lang.String,org.apache.beam.sdk.values.PCollectionView<?>> sideInputMapping
-
sideInputs
java.util.Map<org.apache.beam.sdk.values.TupleTag<?>,org.apache.beam.sdk.values.KV<org.apache.beam.sdk.values.WindowingStrategy<?,?>,SideInputBroadcast<?>>> sideInputs
-
stateful
boolean stateful
-
stepName
java.lang.String stepName
-
useBoundedConcurrentOutput
boolean useBoundedConcurrentOutput
-
windowingStrategy
org.apache.beam.sdk.values.WindowingStrategy<?,?> windowingStrategy
-
-
-
Class org.apache.beam.runners.spark.translation.ReifyTimestampsAndWindowsFunction extends java.lang.Object implements Serializable
-
Class org.apache.beam.runners.spark.translation.SparkAssignWindowFn extends java.lang.Object implements Serializable
-
Class org.apache.beam.runners.spark.translation.SparkCombineFn extends java.lang.Object implements Serializable
-
Serialized Fields
-
combineFn
org.apache.beam.sdk.transforms.CombineWithContext.CombineFnWithContext<ValueT,AccumT,OutputT> combineFn
-
defaultNonMergingCombineStrategy
SparkCombineFn.WindowedAccumulator.Type defaultNonMergingCombineStrategy
-
globalCombine
boolean globalCombine
-
options
org.apache.beam.runners.core.construction.SerializablePipelineOptions options
-
sideInputs
java.util.Map<org.apache.beam.sdk.values.TupleTag<?>,org.apache.beam.sdk.values.KV<org.apache.beam.sdk.values.WindowingStrategy<?,?>,SideInputBroadcast<?>>> sideInputs
-
toValue
org.apache.spark.api.java.function.Function<InputT,ValueT> toValue
-
windowComparator
java.util.Comparator<org.apache.beam.sdk.transforms.windowing.BoundedWindow> windowComparator
-
windowingStrategy
org.apache.beam.sdk.values.WindowingStrategy<?,org.apache.beam.sdk.transforms.windowing.BoundedWindow> windowingStrategy
-
-
-
Class org.apache.beam.runners.spark.translation.SparkExecutableStageContextFactory extends java.lang.Object implements Serializable
-
Class org.apache.beam.runners.spark.translation.SparkPCollectionView extends java.lang.Object implements Serializable
-
Serialized Fields
-
pviews
java.util.Map<org.apache.beam.sdk.values.PCollectionView<?>,scala.Tuple2<byte[],org.apache.beam.sdk.coders.Coder<java.lang.Iterable<org.apache.beam.sdk.util.WindowedValue<?>>>>> pviews
-
-
-
Class org.apache.beam.runners.spark.translation.TranslationUtils.CombineGroupedValues extends java.lang.Object implements Serializable
-
Serialized Fields
-
fn
SparkCombineFn<org.apache.beam.sdk.values.KV<K,InputT>,InputT,?,OutputT> fn
-
-
-
Class org.apache.beam.runners.spark.translation.TranslationUtils.TupleTagFilter extends java.lang.Object implements Serializable
-
Serialized Fields
-
tag
org.apache.beam.sdk.values.TupleTag<V> tag
-
-
-
Class org.apache.beam.runners.spark.translation.ValueAndCoderLazySerializable extends java.lang.Object implements Serializable
-
Serialization Methods
-
readObject
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException
Throws:
java.io.IOException
-
writeObject
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException
Throws:
java.io.IOException
-
-
Serialized Fields
-
coderOrBytes
java.lang.Object coderOrBytes
-
value
T value
-
-
-
-
Package org.apache.beam.runners.spark.translation.streaming
-
Class org.apache.beam.runners.spark.translation.streaming.SparkRunnerStreamingContextFactory extends java.lang.Object implements Serializable
-
Class org.apache.beam.runners.spark.translation.streaming.TestDStream extends org.apache.spark.streaming.dstream.InputDStream<org.apache.beam.sdk.util.WindowedValue<T>> implements Serializable
-
Serialized Fields
-
coder
org.apache.beam.sdk.coders.Coder<org.apache.beam.sdk.util.WindowedValue<T>> coder
-
currentEventIndex
int currentEventIndex
-
insertEmptyBatch
boolean insertEmptyBatch
-
lastValidTimeMs
long lastValidTimeMs
-
lastWatermark
org.joda.time.Instant lastWatermark
-
-
-
Class org.apache.beam.runners.spark.translation.streaming.UnboundedDataset extends java.lang.Object implements Serializable
-
Serialized Fields
-
dStream
org.apache.spark.streaming.api.java.JavaDStream<org.apache.beam.sdk.util.WindowedValue<T>> dStream
-
streamSources
java.util.List<java.lang.Integer> streamSources
-
-
-
-
Package org.apache.beam.runners.spark.util
-
Class org.apache.beam.runners.spark.util.ByteArray extends java.lang.Object implements Serializable
-
Serialized Fields
-
value
byte[] value
-
-
-
Class org.apache.beam.runners.spark.util.GlobalWatermarkHolder.SparkWatermarks extends java.lang.Object implements Serializable
-
Serialized Fields
-
highWatermark
org.joda.time.Instant highWatermark
-
lowWatermark
org.joda.time.Instant lowWatermark
-
synchronizedProcessingTime
org.joda.time.Instant synchronizedProcessingTime
-
-
-
Class org.apache.beam.runners.spark.util.SideInputBroadcast extends java.lang.Object implements Serializable
-
Serialized Fields
-
bcast
org.apache.spark.broadcast.Broadcast<byte[]> bcast
-
coder
org.apache.beam.sdk.coders.Coder<T> coder
-
-
-