
Commit 2cc7353

Add missing file.
1 parent d5eab1f commit 2cc7353

1 file changed: +76 -0

Lines changed: 76 additions & 0 deletions
@@ -0,0 +1,76 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.api.java

import org.apache.spark.{SparkStageInfo, SparkJobInfo, SparkContext}

/**
 * Low-level status reporting APIs for monitoring job and stage progress.
 *
 * These APIs intentionally provide very weak consistency semantics; consumers of these APIs should
 * be prepared to handle empty / missing information. For example, a job's stage ids may be known
 * but the status API may not have any information about the details of those stages, so
 * `getStageInfo` could potentially return `null` for a valid stage id.
 *
 * To limit memory usage, these APIs only provide information on recent jobs / stages. These APIs
 * will provide information for the last `spark.ui.retainedStages` stages and
 * `spark.ui.retainedJobs` jobs.
 */
class JavaSparkStatusAPI private (sc: SparkContext) {

  /**
   * Return a list of all known jobs in a particular job group. If `jobGroup` is `null`, then
   * returns all known jobs that are not associated with a job group.
   *
   * The returned list may contain running, failed, and completed jobs, and may vary across
   * invocations of this method. This method does not guarantee the order of the elements in
   * its result.
   */
  def getJobIdsForGroup(jobGroup: String): Array[Int] = sc.statusAPI.getJobIdsForGroup(jobGroup)

  /**
   * Returns an array containing the ids of all active stages.
   *
   * This method does not guarantee the order of the elements in its result.
   */
  def getActiveStageIds(): Array[Int] = sc.statusAPI.getActiveStageIds()

  /**
   * Returns an array containing the ids of all active jobs.
   *
   * This method does not guarantee the order of the elements in its result.
   */
  def getActiveJobIds(): Array[Int] = sc.statusAPI.getActiveJobIds()

  /**
   * Returns job information, or `null` if the job info could not be found or was garbage collected.
   */
  def getJobInfo(jobId: Int): SparkJobInfo = sc.statusAPI.getJobInfo(jobId).orNull

  /**
   * Returns stage information, or `null` if the stage info could not be found or was
   * garbage collected.
   */
  def getStageInfo(stageId: Int): SparkStageInfo = sc.statusAPI.getStageInfo(stageId).orNull
}

private[spark] object JavaSparkStatusAPI {
  def apply(sc: SparkContext): JavaSparkStatusAPI = {
    new JavaSparkStatusAPI(sc)
  }
}
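
For context, a minimal usage sketch of polling this wrapper follows. It assumes JavaSparkContext exposes the wrapper through a `statusAPI` accessor (that accessor is not part of this diff) and that `SparkJobInfo` / `SparkStageInfo` provide `stageIds()`, `numTasks()`, and `numCompletedTasks()` accessors; treat those names as illustrative assumptions rather than API guaranteed by this commit.

import org.apache.spark.SparkConf
import org.apache.spark.api.java.JavaSparkContext

object StatusApiExample {
  def main(args: Array[String]): Unit = {
    val jsc = new JavaSparkContext(new SparkConf().setAppName("status-demo").setMaster("local[2]"))
    val status = jsc.statusAPI  // assumed accessor on JavaSparkContext; not part of this diff

    // The API is weakly consistent: the id arrays may be empty and any lookup may
    // return null, so every step checks before dereferencing.
    for (jobId <- status.getActiveJobIds()) {
      val jobInfo = status.getJobInfo(jobId)  // may be null
      if (jobInfo != null) {
        for (stageId <- jobInfo.stageIds()) {
          val stageInfo = status.getStageInfo(stageId)  // may also be null
          if (stageInfo != null) {
            println(s"Stage $stageId: ${stageInfo.numCompletedTasks()} / ${stageInfo.numTasks()} tasks complete")
          }
        }
      }
    }

    jsc.stop()
  }
}

Because of the weak consistency semantics described in the class doc, the sketch treats empty id arrays and `null` lookups as normal outcomes rather than errors.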
