@@ -24,14 +24,13 @@ import org.apache.spark.streaming.ui.StreamingJobProgressListener._
 
 private[ui] case class OutputOpIdAndSparkJobId(outputOpId: OutputOpId, sparkJobId: SparkJobId)
 
-private[ui] class BatchUIData(
+private[ui] case class BatchUIData(
     val batchTime: Time,
     val receiverNumRecords: Map[Int, Long],
     val submissionTime: Long,
     val processingStartTime: Option[Long],
-    val processingEndTime: Option[Long]) {
-
-  var outputOpIdSparkJobIdPairs: Seq[OutputOpIdAndSparkJobId] = Seq.empty
+    val processingEndTime: Option[Long],
+    var outputOpIdSparkJobIdPairs: Seq[OutputOpIdAndSparkJobId] = Seq.empty) {
 
   /**
    * Time taken for the first job of this batch to start processing from the time this batch
@@ -60,26 +59,6 @@ private[ui] class BatchUIData(
    * The number of records received by the receivers in this batch.
    */
   def numRecords: Long = receiverNumRecords.map(_._2).sum
-
-  def canEqual(other: Any): Boolean = other.isInstanceOf[BatchUIData]
-
-  override def equals(other: Any): Boolean = other match {
-    case that: BatchUIData =>
-      (that canEqual this) &&
-        outputOpIdSparkJobIdPairs == that.outputOpIdSparkJobIdPairs &&
-        batchTime == that.batchTime &&
-        receiverNumRecords == that.receiverNumRecords &&
-        submissionTime == that.submissionTime &&
-        processingStartTime == that.processingStartTime &&
-        processingEndTime == that.processingEndTime
-    case _ => false
-  }
-
-  override def hashCode(): Int = {
-    val state = Seq(outputOpIdSparkJobIdPairs, batchTime, receiverNumRecords, submissionTime,
-      processingStartTime, processingEndTime)
-    state.map(_.hashCode()).foldLeft(0)((a, b) => 31 * a + b)
-  }
 }
 
 private[ui] object BatchUIData {
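
Note: the hand-written canEqual/equals/hashCode can be dropped because a Scala case class derives structural equality and hashing from every parameter in its first constructor parameter list, including the `var` parameter outputOpIdSparkJobIdPairs, which matches what the removed manual implementations compared. A minimal standalone sketch (not Spark code; BatchStats and its fields are hypothetical names used only for illustration):

```scala
// Minimal sketch, assuming only the Scala standard library.
// Demonstrates that a case class gets equals/hashCode generated over
// all constructor parameters, whether declared as val or var.
case class BatchStats(
    batchTime: Long,
    numRecords: Long,
    var jobIds: Seq[Int] = Seq.empty)

object CaseClassEqualityDemo {
  def main(args: Array[String]): Unit = {
    val a = BatchStats(1000L, 42L, Seq(1, 2))
    val b = BatchStats(1000L, 42L, Seq(1, 2))
    println(a == b)                   // true: structural equality is generated
    println(a.hashCode == b.hashCode) // true: hashCode is consistent with equals
  }
}
```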