
Commit ae36110

Fix style errors

Parent: e4fbd32

5 files changed: 10 additions & 7 deletions

graphx/src/main/scala/org/apache/spark/graphx/VertexRDD.scala

Lines changed: 2 additions & 1 deletion
@@ -396,7 +396,8 @@ object VertexRDD {
    * @param numPartitions the desired number of partitions for the resulting `VertexRDD`
    * @param defaultVal the vertex attribute to use when creating missing vertices
    */
-  def fromEdges[VD: ClassTag](edges: EdgeRDD[_, _], numPartitions: Int, defaultVal: VD): VertexRDD[VD] = {
+  def fromEdges[VD: ClassTag](
+      edges: EdgeRDD[_, _], numPartitions: Int, defaultVal: VD): VertexRDD[VD] = {
     val routingTables = createRoutingTables(edges, new HashPartitioner(numPartitions))
     val vertexPartitions = routingTables.mapPartitions({ routingTableIter =>
       val routingTable =
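
All five hunks do the same thing: they rewrap declarations and comments that exceeded Spark's 100-character line limit. Here, `fromEdges` builds a `VertexRDD` covering every vertex referenced by an `EdgeRDD`, using `defaultVal` for the attributes. A minimal usage sketch, assuming a running `SparkContext` named `sc` and that `fromEdges` is callable from user code (the edge values are illustrative):

import org.apache.spark.graphx._

// Toy edge list; Graph.fromEdges fills in vertex attributes with the default 0.
val rawEdges = sc.parallelize(Seq(Edge(1L, 2L, "follows"), Edge(2L, 3L, "likes")))
val graph = Graph.fromEdges(rawEdges, 0)

// Build a VertexRDD covering every vertex id referenced by the edges,
// hash-partitioned 4 ways, using 0 for vertices not yet present.
val verts: VertexRDD[Int] = VertexRDD.fromEdges(graph.edges, numPartitions = 4, defaultVal = 0)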

graphx/src/main/scala/org/apache/spark/graphx/impl/EdgePartition.scala

Lines changed: 2 additions & 1 deletion
@@ -40,7 +40,8 @@ import org.apache.spark.graphx.util.collection.PrimitiveKeyOpenHashMap
  * @param activeSet an optional active vertex set for filtering computation on the edges
  */
 private[graphx]
-class EdgePartition[@specialized(Char, Int, Boolean, Byte, Long, Float, Double) ED: ClassTag, VD: ClassTag](
+class EdgePartition[
+    @specialized(Char, Int, Boolean, Byte, Long, Float, Double) ED: ClassTag, VD: ClassTag](
     val srcIds: Array[VertexId],
     val dstIds: Array[VertexId],
     val data: Array[ED],
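
The wrapped declaration keeps the `@specialized` annotation, which tells scalac to generate primitive variants of `EdgePartition` so that edge attributes of the listed types are stored unboxed. A self-contained sketch of the same idiom (class and field names are illustrative, not from the Spark source):

import scala.reflect.ClassTag

// scalac emits extra variants of this class for the listed primitives,
// so `values` stays a raw primitive array rather than an array of boxed objects.
class AttrColumn[@specialized(Int, Long, Double) A: ClassTag](val values: Array[A]) {
  def apply(i: Int): A = values(i)
}

val col = new AttrColumn(Array(1.5, 2.5, 3.5)) // uses the Double-specialized variant
println(col(1)) // 2.5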

graphx/src/main/scala/org/apache/spark/graphx/impl/GraphImpl.scala

Lines changed: 2 additions & 2 deletions
@@ -230,8 +230,8 @@ class GraphImpl[VD: ClassTag, ED: ClassTag] protected (
       // updateF preserves type, so we can use incremental replication
       val newVerts = vertices.leftJoin(other)(updateF).cache()
       val changedVerts = vertices.asInstanceOf[VertexRDD[VD2]].diff(newVerts)
-      val newReplicatedVertexView =
-        replicatedVertexView.asInstanceOf[ReplicatedVertexView[VD2, ED]].updateVertices(changedVerts)
+      val newReplicatedVertexView = replicatedVertexView.asInstanceOf[ReplicatedVertexView[VD2, ED]]
+        .updateVertices(changedVerts)
       new GraphImpl(newVerts, newReplicatedVertexView)
     } else {
       // updateF does not preserve type, so we must re-replicate all vertices
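
The reflowed lines sit in the type-preserving branch of the join: because `updateF` keeps the vertex attribute type, only vertices whose values actually changed (computed by `diff`) are shipped to the replicas. A self-contained sketch of that incremental-update idea using plain collections (illustrative only, not the Spark code path):

// Previously replicated values and freshly joined values, keyed by vertex id.
val oldVerts = Map(1L -> 10, 2L -> 20, 3L -> 30)
val newVerts = Map(1L -> 10, 2L -> 25, 3L -> 30)

// diff: keep only the entries whose value changed.
val changedVerts = newVerts.filter { case (id, v) => !oldVerts.get(id).contains(v) }

// Update the replicas by shipping just the changed entries instead of everything.
val updatedReplicas = oldVerts ++ changedVerts
println(changedVerts) // Map(2 -> 25)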

graphx/src/main/scala/org/apache/spark/graphx/impl/ReplicatedVertexView.scala

Lines changed: 2 additions & 1 deletion
@@ -38,7 +38,8 @@ class ReplicatedVertexView[VD: ClassTag, ED: ClassTag](
     var hasDstId: Boolean = false) {
 
   /**
-   * Return a new `ReplicatedVertexView` with the specified `EdgeRDD`, which must have the same shipping level.
+   * Return a new `ReplicatedVertexView` with the specified `EdgeRDD`, which must have the same
+   * shipping level.
    */
   def withEdges[VD2: ClassTag, ED2: ClassTag](
       edges_ : EdgeRDD[ED2, VD2]): ReplicatedVertexView[VD2, ED2] = {
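
Beyond the rewrapped comment, the context lines show the pre-existing `edges_ : EdgeRDD[...]` spacing: a Scala identifier ending in an underscore must be separated from the type-ascription colon by a space, because `edges_:` would itself parse as a single identifier. A tiny sketch of the rule (the names are illustrative):

// Without the space, `x_:` would be read as one identifier, not `x_` plus `:`.
def increment(x_ : Int): Int = x_ + 1
println(increment(41)) // 42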

graphx/src/main/scala/org/apache/spark/graphx/impl/ShippableVertexPartition.scala

Lines changed: 2 additions & 2 deletions
@@ -39,8 +39,8 @@ object ShippableVertexPartition {
     apply(iter, RoutingTablePartition.empty, null.asInstanceOf[VD])
 
   /**
-   * Construct a `ShippableVertexPartition` from the given vertices with the specified routing table,
-   * filling in missing vertices mentioned in the routing table using `defaultVal`.
+   * Construct a `ShippableVertexPartition` from the given vertices with the specified routing
+   * table, filling in missing vertices mentioned in the routing table using `defaultVal`.
    */
   def apply[VD: ClassTag](
       iter: Iterator[(VertexId, VD)], routingTable: RoutingTablePartition, defaultVal: VD)
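
The reworded comment spells out the fill-in behavior: ids that appear in the routing table but not in the input iterator receive `defaultVal`. A self-contained sketch of that logic with plain collections (illustrative, not the Spark implementation):

val present = Map(1L -> "a", 3L -> "c") // vertices we actually have
val routedIds = Seq(1L, 2L, 3L, 4L)     // ids mentioned by the routing table
val defaultVal = "?"

// Every routed id ends up with a value; missing ones fall back to the default.
val filled = routedIds.map(id => id -> present.getOrElse(id, defaultVal)).toMap
println(filled) // Map(1 -> a, 2 -> ?, 3 -> c, 4 -> ?)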
