Package org.apache.iceberg.spark.source
Class SparkMicroBatchStream
- java.lang.Object
-
- org.apache.iceberg.spark.source.SparkMicroBatchStream
-
- All Implemented Interfaces:
org.apache.spark.sql.connector.read.streaming.MicroBatchStream
, org.apache.spark.sql.connector.read.streaming.SparkDataStream
, org.apache.spark.sql.connector.read.streaming.SupportsAdmissionControl
public class SparkMicroBatchStream extends java.lang.Object implements org.apache.spark.sql.connector.read.streaming.MicroBatchStream, org.apache.spark.sql.connector.read.streaming.SupportsAdmissionControl
-
-
Method Summary
All Methods Instance Methods Concrete Methods Modifier and Type Method Description void
commit(org.apache.spark.sql.connector.read.streaming.Offset end)
org.apache.spark.sql.connector.read.PartitionReaderFactory
createReaderFactory()
org.apache.spark.sql.connector.read.streaming.Offset
deserializeOffset(java.lang.String json)
org.apache.spark.sql.connector.read.streaming.ReadLimit
getDefaultReadLimit()
org.apache.spark.sql.connector.read.streaming.Offset
initialOffset()
org.apache.spark.sql.connector.read.streaming.Offset
latestOffset()
org.apache.spark.sql.connector.read.streaming.Offset
latestOffset(org.apache.spark.sql.connector.read.streaming.Offset startOffset, org.apache.spark.sql.connector.read.streaming.ReadLimit limit)
org.apache.spark.sql.connector.read.InputPartition[]
planInputPartitions(org.apache.spark.sql.connector.read.streaming.Offset start, org.apache.spark.sql.connector.read.streaming.Offset end)
void
stop()
-
-
-
Method Detail
-
latestOffset
public org.apache.spark.sql.connector.read.streaming.Offset latestOffset()
- Specified by:
latestOffset
in interface org.apache.spark.sql.connector.read.streaming.MicroBatchStream
-
planInputPartitions
public org.apache.spark.sql.connector.read.InputPartition[] planInputPartitions(org.apache.spark.sql.connector.read.streaming.Offset start, org.apache.spark.sql.connector.read.streaming.Offset end)
- Specified by:
planInputPartitions
in interface org.apache.spark.sql.connector.read.streaming.MicroBatchStream
-
createReaderFactory
public org.apache.spark.sql.connector.read.PartitionReaderFactory createReaderFactory()
- Specified by:
createReaderFactory
in interface org.apache.spark.sql.connector.read.streaming.MicroBatchStream
-
initialOffset
public org.apache.spark.sql.connector.read.streaming.Offset initialOffset()
- Specified by:
initialOffset
in interface org.apache.spark.sql.connector.read.streaming.SparkDataStream
-
deserializeOffset
public org.apache.spark.sql.connector.read.streaming.Offset deserializeOffset(java.lang.String json)
- Specified by:
deserializeOffset
in interface org.apache.spark.sql.connector.read.streaming.SparkDataStream
-
commit
public void commit(org.apache.spark.sql.connector.read.streaming.Offset end)
- Specified by:
commit
in interface org.apache.spark.sql.connector.read.streaming.SparkDataStream
-
stop
public void stop()
- Specified by:
stop
in interface org.apache.spark.sql.connector.read.streaming.SparkDataStream
-
latestOffset
public org.apache.spark.sql.connector.read.streaming.Offset latestOffset(org.apache.spark.sql.connector.read.streaming.Offset startOffset, org.apache.spark.sql.connector.read.streaming.ReadLimit limit)
- Specified by:
latestOffset
in interface org.apache.spark.sql.connector.read.streaming.SupportsAdmissionControl
-
getDefaultReadLimit
public org.apache.spark.sql.connector.read.streaming.ReadLimit getDefaultReadLimit()
- Specified by:
getDefaultReadLimit
in interface org.apache.spark.sql.connector.read.streaming.SupportsAdmissionControl
-
-