Commit 9c0c125

add accumulableInfo

1 parent 00e9cc5

13 files changed, +100 -10 lines changed

core/src/main/scala/org/apache/spark/status/api/v1/AllStagesResource.scala

Lines changed: 11 additions & 0 deletions
@@ -21,6 +21,7 @@ import javax.ws.rs.{GET, PathParam, Produces, QueryParam}
 import javax.ws.rs.core.MediaType
 
 import org.apache.spark.executor.{InputMetrics => InternalInputMetrics, OutputMetrics => InternalOutputMetrics, ShuffleReadMetrics => InternalShuffleReadMetrics, ShuffleWriteMetrics => InternalShuffleWriteMetrics, TaskMetrics => InternalTaskMetrics}
+import org.apache.spark.scheduler.{AccumulableInfo => InternalAccumulableInfo}
 import org.apache.spark.scheduler.StageInfo
 import org.apache.spark.status.api._
 import org.apache.spark.ui.SparkUI
@@ -87,6 +88,9 @@ object AllStagesResource {
     } else {
       None
     }
+
+    val accumulableInfo = stageUiData.accumulables.values.map { convertAccumulableInfo }.toSeq
+
     new StageData(
       status = status,
       stageId = stageInfo.stageId,
@@ -107,6 +111,7 @@ object AllStagesResource {
       schedulingPool = stageUiData.schedulingPool,
       name = stageInfo.name,
       details = stageInfo.details,
+      accumulatorUpdates = accumulableInfo,
       tasks = taskData,
       executorSummary = executorSummary
     )
@@ -135,11 +140,17 @@ object AllStagesResource {
       host = uiData.taskInfo.host,
       taskLocality = uiData.taskInfo.taskLocality.toString(),
       speculative = uiData.taskInfo.speculative,
+      accumulatorUpdates = uiData.taskInfo.accumulables.map { convertAccumulableInfo },
       errorMessage = uiData.errorMessage,
       taskMetrics = uiData.taskMetrics.map { convertUiTaskMetrics }
     )
   }
 
+  def convertAccumulableInfo(acc: InternalAccumulableInfo): AccumulableInfo = {
+    new AccumulableInfo(acc.id, acc.name, acc.value)
+  }
+
+
   def convertUiTaskMetrics(internal: InternalTaskMetrics): TaskMetrics = {
     new TaskMetrics(
       executorDeserializeTime = internal.executorDeserializeTime,

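A minimal sketch of the new converter in use (not part of the commit). It assumes the Spark 1.x companion apply of the internal org.apache.spark.scheduler.AccumulableInfo, whose third argument is the per-task partial update that convertAccumulableInfo deliberately drops:

    import org.apache.spark.scheduler.{AccumulableInfo => InternalAccumulableInfo}
    import org.apache.spark.status.api.v1.AllStagesResource

    // hypothetical id/name/values; Some("5") is the partial update,
    // which the converter discards in favor of the final value
    val internal = InternalAccumulableInfo(1L, "my counter", Some("5"), "42")
    val api = AllStagesResource.convertAccumulableInfo(internal)
    assert(api.id == 1L && api.name == "my counter" && api.value == "42")
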
core/src/main/scala/org/apache/spark/status/api/v1/JsonRootResource.scala

Lines changed: 2 additions & 1 deletion
@@ -101,7 +101,8 @@ private[spark] trait UIRoot {
    */
   def withSparkUI[T](appId: String)(f: SparkUI => T): T = {
     getSparkUI(appId) match {
-      case Some(ui) => f(ui)
+      case Some(ui) =>
+        f(ui)
       case None => throw new NotFoundException("no such app: " + appId)
     }
   }

core/src/main/scala/org/apache/spark/status/api/v1/api.scala

Lines changed: 17 additions & 1 deletion
@@ -135,7 +135,7 @@ class StageData(
     val details: String,
     val schedulingPool: String,
 
-    //TODO what to do about accumulables?
+    val accumulatorUpdates: Seq[AccumulableInfo],
     val tasks: Option[Map[Long, TaskData]],
     val executorSummary:Option[Map[String,ExecutorStageSummary]]
 )
@@ -149,6 +149,7 @@ class TaskData(
     val host: String,
     val taskLocality: String,
     val speculative: Boolean,
+    val accumulatorUpdates: Seq[AccumulableInfo],
     val errorMessage: Option[String] = None,
     val taskMetrics: Option[TaskMetrics] = None
 )
@@ -191,3 +192,18 @@ class ShuffleWriteMetrics(
     val writeTime: Long,
     val recordsWritten: Long
 )
+
+class AccumulableInfo (
+    val id: Long,
+    val name: String,
+    //no partial updates, since they aren't logged. We can add them later
+    val value: String) {
+
+  override def equals(other: Any): Boolean = other match {
+    case acc: AccumulableInfo =>
+      this.id == acc.id && this.name == acc.name &&
+        this.value == acc.value
+    case _ => false
+  }
+}
+

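One caveat on the new class (an observation, not a change in this commit): AccumulableInfo overrides equals without a matching hashCode, so equal instances may hash differently in hash-based collections. A consistent override would hash the same three fields the equals compares; a sketch:

    // sketch only: mirror the equals override above
    override def hashCode(): Int = (id, name, value).##
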
core/src/test/resources/HistoryServerExpectations/applications/json_expectation

Lines changed: 7 additions & 0 deletions
@@ -1,4 +1,11 @@
 [ {
+  "id" : "local-1426533911241",
+  "name" : "Spark shell",
+  "startTime" : "2015-03-16T19:25:10.242GMT",
+  "endTime" : "2015-03-16T19:25:45.177GMT",
+  "sparkUser" : "irashid",
+  "completed" : true
+}, {
   "id" : "local-1425081759269",
   "name" : "Spark shell",
   "startTime" : "2015-02-28T00:02:38.277GMT",

core/src/test/resources/HistoryServerExpectations/applications/local-1422981780767/stages/1/json_expectation

Lines changed: 9 additions & 0 deletions
@@ -18,6 +18,7 @@
   "name" : "map at <console>:14",
   "details" : "org.apache.spark.rdd.RDD.map(RDD.scala:271)\n$line10.$read$$iwC$$iwC$$iwC$$iwC.<init>(<console>:14)\n$line10.$read$$iwC$$iwC$$iwC.<init>(<console>:19)\n$line10.$read$$iwC$$iwC.<init>(<console>:21)\n$line10.$read$$iwC.<init>(<console>:23)\n$line10.$read.<init>(<console>:25)\n$line10.$read$.<init>(<console>:29)\n$line10.$read$.<clinit>(<console>)\n$line10.$eval$.<init>(<console>:7)\n$line10.$eval$.<clinit>(<console>)\n$line10.$eval.$print(<console>)\nsun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\nsun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)\nsun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\njava.lang.reflect.Method.invoke(Method.java:606)\norg.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:852)\norg.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1125)\norg.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:674)\norg.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:705)\norg.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:669)",
   "schedulingPool" : "default",
+  "accumulatorUpdates" : [ ],
   "tasks" : {
     "8" : {
       "taskId" : 8,
@@ -28,6 +29,7 @@
       "host" : "localhost",
       "taskLocality" : "PROCESS_LOCAL",
       "speculative" : false,
+      "accumulatorUpdates" : [ ],
       "taskMetrics" : {
         "executorDeserializeTime" : 1,
         "executorRunTime" : 435,
@@ -56,6 +58,7 @@
       "host" : "localhost",
       "taskLocality" : "PROCESS_LOCAL",
       "speculative" : false,
+      "accumulatorUpdates" : [ ],
       "taskMetrics" : {
         "executorDeserializeTime" : 2,
         "executorRunTime" : 434,
@@ -84,6 +87,7 @@
       "host" : "localhost",
       "taskLocality" : "PROCESS_LOCAL",
       "speculative" : false,
+      "accumulatorUpdates" : [ ],
       "taskMetrics" : {
         "executorDeserializeTime" : 2,
         "executorRunTime" : 434,
@@ -112,6 +116,7 @@
       "host" : "localhost",
       "taskLocality" : "PROCESS_LOCAL",
       "speculative" : false,
+      "accumulatorUpdates" : [ ],
       "taskMetrics" : {
         "executorDeserializeTime" : 2,
         "executorRunTime" : 434,
@@ -140,6 +145,7 @@
       "host" : "localhost",
       "taskLocality" : "PROCESS_LOCAL",
       "speculative" : false,
+      "accumulatorUpdates" : [ ],
       "taskMetrics" : {
         "executorDeserializeTime" : 2,
         "executorRunTime" : 434,
@@ -168,6 +174,7 @@
       "host" : "localhost",
       "taskLocality" : "PROCESS_LOCAL",
       "speculative" : false,
+      "accumulatorUpdates" : [ ],
       "taskMetrics" : {
         "executorDeserializeTime" : 1,
         "executorRunTime" : 436,
@@ -196,6 +203,7 @@
       "host" : "localhost",
       "taskLocality" : "PROCESS_LOCAL",
       "speculative" : false,
+      "accumulatorUpdates" : [ ],
       "taskMetrics" : {
         "executorDeserializeTime" : 2,
         "executorRunTime" : 434,
@@ -224,6 +232,7 @@
       "host" : "localhost",
       "taskLocality" : "PROCESS_LOCAL",
       "speculative" : false,
+      "accumulatorUpdates" : [ ],
       "taskMetrics" : {
         "executorDeserializeTime" : 1,
         "executorRunTime" : 435,

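Every fixture in this commit records an empty accumulatorUpdates array, since none of the captured applications used accumulators. For one that did, each entry would carry the three AccumulableInfo fields; illustrative values, not taken from any fixture:

    "accumulatorUpdates" : [ {
      "id" : 1,
      "name" : "my counter",
      "value" : "5050"
    } ],
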
core/src/test/resources/HistoryServerExpectations/applications/local-1422981780767/stages/json_expectation

Lines changed: 8 additions & 4 deletions
@@ -17,7 +17,8 @@
   "diskBytesSpilled" : 0,
   "name" : "count at <console>:17",
   "details" : "org.apache.spark.rdd.RDD.count(RDD.scala:910)\n$line19.$read$$iwC$$iwC$$iwC$$iwC.<init>(<console>:17)\n$line19.$read$$iwC$$iwC$$iwC.<init>(<console>:22)\n$line19.$read$$iwC$$iwC.<init>(<console>:24)\n$line19.$read$$iwC.<init>(<console>:26)\n$line19.$read.<init>(<console>:28)\n$line19.$read$.<init>(<console>:32)\n$line19.$read$.<clinit>(<console>)\n$line19.$eval$.<init>(<console>:7)\n$line19.$eval$.<clinit>(<console>)\n$line19.$eval.$print(<console>)\nsun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\nsun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)\nsun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\njava.lang.reflect.Method.invoke(Method.java:606)\norg.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:852)\norg.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1125)\norg.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:674)\norg.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:705)\norg.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:669)",
-  "schedulingPool" : "default"
+  "schedulingPool" : "default",
+  "accumulatorUpdates" : [ ]
 }, {
   "status" : "Complete",
   "stageId" : 1,
@@ -37,7 +38,8 @@
   "diskBytesSpilled" : 0,
   "name" : "map at <console>:14",
   "details" : "org.apache.spark.rdd.RDD.map(RDD.scala:271)\n$line10.$read$$iwC$$iwC$$iwC$$iwC.<init>(<console>:14)\n$line10.$read$$iwC$$iwC$$iwC.<init>(<console>:19)\n$line10.$read$$iwC$$iwC.<init>(<console>:21)\n$line10.$read$$iwC.<init>(<console>:23)\n$line10.$read.<init>(<console>:25)\n$line10.$read$.<init>(<console>:29)\n$line10.$read$.<clinit>(<console>)\n$line10.$eval$.<init>(<console>:7)\n$line10.$eval$.<clinit>(<console>)\n$line10.$eval.$print(<console>)\nsun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\nsun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)\nsun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\njava.lang.reflect.Method.invoke(Method.java:606)\norg.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:852)\norg.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1125)\norg.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:674)\norg.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:705)\norg.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:669)",
-  "schedulingPool" : "default"
+  "schedulingPool" : "default",
+  "accumulatorUpdates" : [ ]
 }, {
   "status" : "Complete",
   "stageId" : 0,
@@ -57,7 +59,8 @@
   "diskBytesSpilled" : 0,
   "name" : "count at <console>:15",
   "details" : "org.apache.spark.rdd.RDD.count(RDD.scala:910)\n$line9.$read$$iwC$$iwC$$iwC$$iwC.<init>(<console>:15)\n$line9.$read$$iwC$$iwC$$iwC.<init>(<console>:20)\n$line9.$read$$iwC$$iwC.<init>(<console>:22)\n$line9.$read$$iwC.<init>(<console>:24)\n$line9.$read.<init>(<console>:26)\n$line9.$read$.<init>(<console>:30)\n$line9.$read$.<clinit>(<console>)\n$line9.$eval$.<init>(<console>:7)\n$line9.$eval$.<clinit>(<console>)\n$line9.$eval.$print(<console>)\nsun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\nsun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)\nsun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\njava.lang.reflect.Method.invoke(Method.java:606)\norg.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:852)\norg.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1125)\norg.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:674)\norg.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:705)\norg.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:669)",
-  "schedulingPool" : "default"
+  "schedulingPool" : "default",
+  "accumulatorUpdates" : [ ]
 }, {
   "status" : "Failed",
   "stageId" : 2,
@@ -77,5 +80,6 @@
   "diskBytesSpilled" : 0,
   "name" : "count at <console>:20",
   "details" : "org.apache.spark.rdd.RDD.count(RDD.scala:910)\n$line11.$read$$iwC$$iwC$$iwC$$iwC.<init>(<console>:20)\n$line11.$read$$iwC$$iwC$$iwC.<init>(<console>:25)\n$line11.$read$$iwC$$iwC.<init>(<console>:27)\n$line11.$read$$iwC.<init>(<console>:29)\n$line11.$read.<init>(<console>:31)\n$line11.$read$.<init>(<console>:35)\n$line11.$read$.<clinit>(<console>)\n$line11.$eval$.<init>(<console>:7)\n$line11.$eval$.<clinit>(<console>)\n$line11.$eval.$print(<console>)\nsun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\nsun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)\nsun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\njava.lang.reflect.Method.invoke(Method.java:606)\norg.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:852)\norg.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1125)\norg.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:674)\norg.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:705)\norg.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:669)",
-  "schedulingPool" : "default"
+  "schedulingPool" : "default",
+  "accumulatorUpdates" : [ ]
 } ]

core/src/test/resources/HistoryServerExpectations/applications/local-1422981780767/stages?status=complete/json_expectation

Lines changed: 6 additions & 3 deletions
@@ -17,7 +17,8 @@
   "diskBytesSpilled" : 0,
   "name" : "count at <console>:17",
   "details" : "org.apache.spark.rdd.RDD.count(RDD.scala:910)\n$line19.$read$$iwC$$iwC$$iwC$$iwC.<init>(<console>:17)\n$line19.$read$$iwC$$iwC$$iwC.<init>(<console>:22)\n$line19.$read$$iwC$$iwC.<init>(<console>:24)\n$line19.$read$$iwC.<init>(<console>:26)\n$line19.$read.<init>(<console>:28)\n$line19.$read$.<init>(<console>:32)\n$line19.$read$.<clinit>(<console>)\n$line19.$eval$.<init>(<console>:7)\n$line19.$eval$.<clinit>(<console>)\n$line19.$eval.$print(<console>)\nsun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\nsun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)\nsun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\njava.lang.reflect.Method.invoke(Method.java:606)\norg.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:852)\norg.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1125)\norg.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:674)\norg.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:705)\norg.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:669)",
-  "schedulingPool" : "default"
+  "schedulingPool" : "default",
+  "accumulatorUpdates" : [ ]
 }, {
   "status" : "Complete",
   "stageId" : 1,
@@ -37,7 +38,8 @@
   "diskBytesSpilled" : 0,
   "name" : "map at <console>:14",
   "details" : "org.apache.spark.rdd.RDD.map(RDD.scala:271)\n$line10.$read$$iwC$$iwC$$iwC$$iwC.<init>(<console>:14)\n$line10.$read$$iwC$$iwC$$iwC.<init>(<console>:19)\n$line10.$read$$iwC$$iwC.<init>(<console>:21)\n$line10.$read$$iwC.<init>(<console>:23)\n$line10.$read.<init>(<console>:25)\n$line10.$read$.<init>(<console>:29)\n$line10.$read$.<clinit>(<console>)\n$line10.$eval$.<init>(<console>:7)\n$line10.$eval$.<clinit>(<console>)\n$line10.$eval.$print(<console>)\nsun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\nsun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)\nsun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\njava.lang.reflect.Method.invoke(Method.java:606)\norg.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:852)\norg.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1125)\norg.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:674)\norg.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:705)\norg.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:669)",
-  "schedulingPool" : "default"
+  "schedulingPool" : "default",
+  "accumulatorUpdates" : [ ]
 }, {
   "status" : "Complete",
   "stageId" : 0,
@@ -57,5 +59,6 @@
   "diskBytesSpilled" : 0,
   "name" : "count at <console>:15",
   "details" : "org.apache.spark.rdd.RDD.count(RDD.scala:910)\n$line9.$read$$iwC$$iwC$$iwC$$iwC.<init>(<console>:15)\n$line9.$read$$iwC$$iwC$$iwC.<init>(<console>:20)\n$line9.$read$$iwC$$iwC.<init>(<console>:22)\n$line9.$read$$iwC.<init>(<console>:24)\n$line9.$read.<init>(<console>:26)\n$line9.$read$.<init>(<console>:30)\n$line9.$read$.<clinit>(<console>)\n$line9.$eval$.<init>(<console>:7)\n$line9.$eval$.<clinit>(<console>)\n$line9.$eval.$print(<console>)\nsun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\nsun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)\nsun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\njava.lang.reflect.Method.invoke(Method.java:606)\norg.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:852)\norg.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1125)\norg.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:674)\norg.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:705)\norg.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:669)",
-  "schedulingPool" : "default"
+  "schedulingPool" : "default",
+  "accumulatorUpdates" : [ ]
 } ]

core/src/test/resources/HistoryServerExpectations/applications/local-1422981780767/stages?status=failed/json_expectation

Lines changed: 2 additions & 1 deletion
@@ -17,5 +17,6 @@
   "diskBytesSpilled" : 0,
   "name" : "count at <console>:20",
   "details" : "org.apache.spark.rdd.RDD.count(RDD.scala:910)\n$line11.$read$$iwC$$iwC$$iwC$$iwC.<init>(<console>:20)\n$line11.$read$$iwC$$iwC$$iwC.<init>(<console>:25)\n$line11.$read$$iwC$$iwC.<init>(<console>:27)\n$line11.$read$$iwC.<init>(<console>:29)\n$line11.$read.<init>(<console>:31)\n$line11.$read$.<init>(<console>:35)\n$line11.$read$.<clinit>(<console>)\n$line11.$eval$.<init>(<console>:7)\n$line11.$eval$.<clinit>(<console>)\n$line11.$eval.$print(<console>)\nsun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\nsun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)\nsun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\njava.lang.reflect.Method.invoke(Method.java:606)\norg.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:852)\norg.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1125)\norg.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:674)\norg.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:705)\norg.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:669)",
-  "schedulingPool" : "default"
+  "schedulingPool" : "default",
+  "accumulatorUpdates" : [ ]
 } ]

core/src/test/resources/HistoryServerExpectations/applications?minDate=2015-02-10/json_expectation

Lines changed: 7 additions & 0 deletions
@@ -1,4 +1,11 @@
 [ {
+  "id" : "local-1426533911241",
+  "name" : "Spark shell",
+  "startTime" : "2015-03-16T19:25:10.242GMT",
+  "endTime" : "2015-03-16T19:25:45.177GMT",
+  "sparkUser" : "irashid",
+  "completed" : true
+}, {
   "id" : "local-1425081759269",
   "name" : "Spark shell",
   "startTime" : "2015-02-28T00:02:38.277GMT",

core/src/test/resources/HistoryServerExpectations/applications?status=completed/json_expectation

Lines changed: 7 additions & 0 deletions
@@ -1,4 +1,11 @@
 [ {
+  "id" : "local-1426533911241",
+  "name" : "Spark shell",
+  "startTime" : "2015-03-16T19:25:10.242GMT",
+  "endTime" : "2015-03-16T19:25:45.177GMT",
+  "sparkUser" : "irashid",
+  "completed" : true
+}, {
   "id" : "local-1425081759269",
   "name" : "Spark shell",
   "startTime" : "2015-02-28T00:02:38.277GMT",

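As an end-to-end smoke test, the new field can be checked against a running history server. A sketch, assuming the JSON API is mounted at /api/v1 as in released Spark; this development branch may still mount JsonRootResource elsewhere:

    import scala.io.Source

    // hypothetical host and app id; the stage JSON should now include
    // an "accumulatorUpdates" array at both stage and task level
    val url = "http://localhost:18080/api/v1/applications/local-1422981780767/stages"
    println(Source.fromURL(url).mkString)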