
Commit 73702f9

[SPARK-5038] Add explicit return type for implicit functions.
This is a follow-up PR covering the rest of Spark (outside Spark SQL). The original PR for Spark SQL can be found at #3859
1 parent 352ed6b commit 73702f9
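
Every change below follows one pattern: an implicit def whose result type was previously inferred from its body now declares that type explicitly, so the public signature can no longer drift silently when the body changes. A minimal standalone sketch of the before/after, using hypothetical names not taken from this diff:

import scala.language.implicitConversions

object ExplicitImplicitSketch {
  class Meters(val value: Double)

  // Before: the implicit's result type is inferred from the body, so an
  // edit to the body can silently change the public signature.
  //   implicit def doubleToMeters(d: Double) = new Meters(d)

  // After: the result type is declared, making the conversion's contract
  // explicit and stable.
  implicit def doubleToMeters(d: Double): Meters = new Meters(d)

  def main(args: Array[String]): Unit = {
    val m: Meters = 3.0 // the implicit conversion fires here
    println(m.value)
  }
}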

6 files changed (+64, -63 lines)

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 7 additions & 7 deletions
@@ -1708,19 +1708,19 @@ object SparkContext extends Logging {
 
   // Implicit conversions to common Writable types, for saveAsSequenceFile
 
-  implicit def intToIntWritable(i: Int) = new IntWritable(i)
+  implicit def intToIntWritable(i: Int): IntWritable = new IntWritable(i)
 
-  implicit def longToLongWritable(l: Long) = new LongWritable(l)
+  implicit def longToLongWritable(l: Long): LongWritable = new LongWritable(l)
 
-  implicit def floatToFloatWritable(f: Float) = new FloatWritable(f)
+  implicit def floatToFloatWritable(f: Float): FloatWritable = new FloatWritable(f)
 
-  implicit def doubleToDoubleWritable(d: Double) = new DoubleWritable(d)
+  implicit def doubleToDoubleWritable(d: Double): DoubleWritable = new DoubleWritable(d)
 
-  implicit def boolToBoolWritable (b: Boolean) = new BooleanWritable(b)
+  implicit def boolToBoolWritable (b: Boolean): BooleanWritable = new BooleanWritable(b)
 
-  implicit def bytesToBytesWritable (aob: Array[Byte]) = new BytesWritable(aob)
+  implicit def bytesToBytesWritable (aob: Array[Byte]): BytesWritable = new BytesWritable(aob)
 
-  implicit def stringToText(s: String) = new Text(s)
+  implicit def stringToText(s: String): Text = new Text(s)
 
   private implicit def arrayToArrayWritable[T <% Writable: ClassTag](arr: Traversable[T])
     : ArrayWritable = {
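
These conversions are what let user code hand plain Scala values to APIs that expect Hadoop Writable types. A small usage sketch, assuming a Spark 1.x build where `import org.apache.spark.SparkContext._` brings these implicits into scope:

object WritableConversionSketch {
  import org.apache.hadoop.io.{IntWritable, Text}
  import org.apache.spark.SparkContext._ // the implicit Writable conversions above

  val iw: IntWritable = 42  // via intToIntWritable
  val txt: Text = "hello"   // via stringToText
}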

core/src/main/scala/org/apache/spark/util/Vector.scala

Lines changed: 19 additions & 19 deletions
@@ -24,9 +24,9 @@ import org.apache.spark.util.random.XORShiftRandom
 
 @deprecated("Use Vectors.dense from Spark's mllib.linalg package instead.", "1.0.0")
 class Vector(val elements: Array[Double]) extends Serializable {
-  def length = elements.length
+  def length: Int = elements.length
 
-  def apply(index: Int) = elements(index)
+  def apply(index: Int): Double = elements(index)
 
   def + (other: Vector): Vector = {
     if (length != other.length) {
@@ -35,7 +35,7 @@ class Vector(val elements: Array[Double]) extends Serializable {
     Vector(length, i => this(i) + other(i))
   }
 
-  def add(other: Vector) = this + other
+  def add(other: Vector): Vector = this + other
 
   def - (other: Vector): Vector = {
     if (length != other.length) {
@@ -44,7 +44,7 @@ class Vector(val elements: Array[Double]) extends Serializable {
     Vector(length, i => this(i) - other(i))
   }
 
-  def subtract(other: Vector) = this - other
+  def subtract(other: Vector): Vector = this - other
 
   def dot(other: Vector): Double = {
     if (length != other.length) {
@@ -93,19 +93,19 @@ class Vector(val elements: Array[Double]) extends Serializable {
     this
   }
 
-  def addInPlace(other: Vector) = this +=other
+  def addInPlace(other: Vector): Vector = this +=other
 
   def * (scale: Double): Vector = Vector(length, i => this(i) * scale)
 
-  def multiply (d: Double) = this * d
+  def multiply (d: Double): Vector = this * d
 
   def / (d: Double): Vector = this * (1 / d)
 
-  def divide (d: Double) = this / d
+  def divide (d: Double): Vector = this / d
 
-  def unary_- = this * -1
+  def unary_- : Vector = this * -1
 
-  def sum = elements.reduceLeft(_ + _)
+  def sum: Double = elements.reduceLeft(_ + _)
 
   def squaredDist(other: Vector): Double = {
     var ans = 0.0
@@ -119,40 +119,40 @@ class Vector(val elements: Array[Double]) extends Serializable {
 
   def dist(other: Vector): Double = math.sqrt(squaredDist(other))
 
-  override def toString = elements.mkString("(", ", ", ")")
+  override def toString: String = elements.mkString("(", ", ", ")")
 }
 
 object Vector {
-  def apply(elements: Array[Double]) = new Vector(elements)
+  def apply(elements: Array[Double]): Vector = new Vector(elements)
 
-  def apply(elements: Double*) = new Vector(elements.toArray)
+  def apply(elements: Double*): Vector = new Vector(elements.toArray)
 
   def apply(length: Int, initializer: Int => Double): Vector = {
     val elements: Array[Double] = Array.tabulate(length)(initializer)
     new Vector(elements)
   }
 
-  def zeros(length: Int) = new Vector(new Array[Double](length))
+  def zeros(length: Int): Vector = new Vector(new Array[Double](length))
 
-  def ones(length: Int) = Vector(length, _ => 1)
+  def ones(length: Int): Vector = Vector(length, _ => 1)
 
   /**
    * Creates this [[org.apache.spark.util.Vector]] of given length containing random numbers
   * between 0.0 and 1.0. Optional scala.util.Random number generator can be provided.
   */
-  def random(length: Int, random: Random = new XORShiftRandom()) =
+  def random(length: Int, random: Random = new XORShiftRandom()): Vector =
     Vector(length, _ => random.nextDouble())
 
   class Multiplier(num: Double) {
-    def * (vec: Vector) = vec * num
+    def * (vec: Vector): Vector = vec * num
   }
 
-  implicit def doubleToMultiplier(num: Double) = new Multiplier(num)
+  implicit def doubleToMultiplier(num: Double): Multiplier = new Multiplier(num)
 
   implicit object VectorAccumParam extends org.apache.spark.AccumulatorParam[Vector] {
-    def addInPlace(t1: Vector, t2: Vector) = t1 + t2
+    def addInPlace(t1: Vector, t2: Vector): Vector = t1 + t2
 
-    def zero(initialValue: Vector) = Vector.zeros(initialValue.length)
+    def zero(initialValue: Vector): Vector = Vector.zeros(initialValue.length)
   }
 }
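
The `Multiplier`/`doubleToMultiplier` pair exists so a scalar can appear on the left-hand side of `*`. A usage sketch for this (deprecated) vector class, assuming the companion object's implicits are imported:

object MultiplierSketch {
  import org.apache.spark.util.Vector
  import org.apache.spark.util.Vector._ // doubleToMultiplier

  val v = Vector(1.0, 2.0, 3.0)
  val scaled: Vector = 2.0 * v  // 2.0 is wrapped in a Multiplier
  val total: Double = v.sum     // sum is now explicitly typed as Double
}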

graphx/src/main/scala/org/apache/spark/graphx/impl/EdgePartitionBuilder.scala

Lines changed: 32 additions & 31 deletions
@@ -129,44 +129,45 @@ private[impl] case class EdgeWithLocalIds[@specialized ED](
     srcId: VertexId, dstId: VertexId, localSrcId: Int, localDstId: Int, attr: ED)
 
 private[impl] object EdgeWithLocalIds {
-  implicit def lexicographicOrdering[ED] = new Ordering[EdgeWithLocalIds[ED]] {
-    override def compare(a: EdgeWithLocalIds[ED], b: EdgeWithLocalIds[ED]): Int = {
-      if (a.srcId == b.srcId) {
-        if (a.dstId == b.dstId) 0
-        else if (a.dstId < b.dstId) -1
+  implicit def lexicographicOrdering[ED]: Ordering[EdgeWithLocalIds[ED]] =
+    new Ordering[EdgeWithLocalIds[ED]] {
+      override def compare(a: EdgeWithLocalIds[ED], b: EdgeWithLocalIds[ED]): Int = {
+        if (a.srcId == b.srcId) {
+          if (a.dstId == b.dstId) 0
+          else if (a.dstId < b.dstId) -1
+          else 1
+        } else if (a.srcId < b.srcId) -1
         else 1
-      } else if (a.srcId < b.srcId) -1
-      else 1
+      }
     }
-  }
 
-  private[graphx] def edgeArraySortDataFormat[ED]
-    = new SortDataFormat[EdgeWithLocalIds[ED], Array[EdgeWithLocalIds[ED]]] {
-    override def getKey(
-        data: Array[EdgeWithLocalIds[ED]], pos: Int): EdgeWithLocalIds[ED] = {
-      data(pos)
-    }
+  private[graphx] def edgeArraySortDataFormat[ED] = {
+    new SortDataFormat[EdgeWithLocalIds[ED], Array[EdgeWithLocalIds[ED]]] {
+      override def getKey(data: Array[EdgeWithLocalIds[ED]], pos: Int): EdgeWithLocalIds[ED] = {
+        data(pos)
+      }
 
-    override def swap(data: Array[EdgeWithLocalIds[ED]], pos0: Int, pos1: Int): Unit = {
-      val tmp = data(pos0)
-      data(pos0) = data(pos1)
-      data(pos1) = tmp
-    }
+      override def swap(data: Array[EdgeWithLocalIds[ED]], pos0: Int, pos1: Int): Unit = {
+        val tmp = data(pos0)
+        data(pos0) = data(pos1)
+        data(pos1) = tmp
+      }
 
-    override def copyElement(
-        src: Array[EdgeWithLocalIds[ED]], srcPos: Int,
-        dst: Array[EdgeWithLocalIds[ED]], dstPos: Int) {
-      dst(dstPos) = src(srcPos)
-    }
+      override def copyElement(
+          src: Array[EdgeWithLocalIds[ED]], srcPos: Int,
+          dst: Array[EdgeWithLocalIds[ED]], dstPos: Int) {
+        dst(dstPos) = src(srcPos)
+      }
 
-    override def copyRange(
-        src: Array[EdgeWithLocalIds[ED]], srcPos: Int,
-        dst: Array[EdgeWithLocalIds[ED]], dstPos: Int, length: Int) {
-      System.arraycopy(src, srcPos, dst, dstPos, length)
-    }
+      override def copyRange(
+          src: Array[EdgeWithLocalIds[ED]], srcPos: Int,
+          dst: Array[EdgeWithLocalIds[ED]], dstPos: Int, length: Int) {
+        System.arraycopy(src, srcPos, dst, dstPos, length)
+      }
 
-    override def allocate(length: Int): Array[EdgeWithLocalIds[ED]] = {
-      new Array[EdgeWithLocalIds[ED]](length)
+      override def allocate(length: Int): Array[EdgeWithLocalIds[ED]] = {
+        new Array[EdgeWithLocalIds[ED]](length)
+      }
     }
   }
 }
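
Declaring the result type matters most for `lexicographicOrdering`, since an implicit `Ordering` is located by its type. A self-contained sketch of the same idea with a hypothetical edge type (graphx's real types are not used here):

object OrderingSketch {
  case class SimpleEdge(srcId: Long, dstId: Long)

  // An explicitly typed implicit Ordering, lexicographic on (srcId, dstId).
  implicit val lexicographic: Ordering[SimpleEdge] =
    Ordering.by(e => (e.srcId, e.dstId))

  def main(args: Array[String]): Unit = {
    val edges = Array(SimpleEdge(2, 1), SimpleEdge(1, 2), SimpleEdge(1, 1))
    println(edges.sorted.mkString(", ")) // picks up the implicit Ordering above
  }
}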

graphx/src/main/scala/org/apache/spark/graphx/impl/ShippableVertexPartition.scala

Lines changed: 2 additions & 2 deletions
@@ -74,8 +74,8 @@ object ShippableVertexPartition {
    * Implicit conversion to allow invoking `VertexPartitionBase` operations directly on a
    * `ShippableVertexPartition`.
    */
-  implicit def shippablePartitionToOps[VD: ClassTag](partition: ShippableVertexPartition[VD]) =
-    new ShippableVertexPartitionOps(partition)
+  implicit def shippablePartitionToOps[VD: ClassTag](partition: ShippableVertexPartition[VD])
+    : ShippableVertexPartitionOps[VD] = new ShippableVertexPartitionOps(partition)
 
   /**
    * Implicit evidence that `ShippableVertexPartition` is a member of the

graphx/src/main/scala/org/apache/spark/graphx/impl/VertexPartition.scala

Lines changed: 2 additions & 2 deletions
@@ -38,8 +38,8 @@ private[graphx] object VertexPartition {
    * Implicit conversion to allow invoking `VertexPartitionBase` operations directly on a
    * `VertexPartition`.
    */
-  implicit def partitionToOps[VD: ClassTag](partition: VertexPartition[VD]) =
-    new VertexPartitionOps(partition)
+  implicit def partitionToOps[VD: ClassTag](partition: VertexPartition[VD])
+    : VertexPartitionOps[VD] = new VertexPartitionOps(partition)
 
   /**
    * Implicit evidence that `VertexPartition` is a member of the `VertexPartitionBaseOpsConstructor`

graphx/src/main/scala/org/apache/spark/graphx/impl/VertexPartitionBaseOps.scala

Lines changed: 2 additions & 2 deletions
@@ -238,8 +238,8 @@ private[graphx] abstract class VertexPartitionBaseOps
    * because these methods return a `Self` and this implicit conversion re-wraps that in a
    * `VertexPartitionBaseOps`. This relies on the context bound on `Self`.
    */
-  private implicit def toOps[VD2: ClassTag](
-      partition: Self[VD2]): VertexPartitionBaseOps[VD2, Self] = {
+  private implicit def toOps[VD2: ClassTag](partition: Self[VD2])
+    : VertexPartitionBaseOps[VD2, Self] = {
     implicitly[VertexPartitionBaseOpsConstructor[Self]].toOps(partition)
   }
 }
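
The three graphx conversions in this commit are all instances of the same "ops wrapper" (enrich-my-library) pattern: an implicit conversion from a partition type to a class carrying its operations, now with the wrapper type written out. A self-contained sketch of the pattern with hypothetical names:

import scala.language.implicitConversions

object OpsWrapperSketch {
  class Box[A](val value: A)

  class BoxOps[A](box: Box[A]) {
    def describe: String = s"Box(${box.value})"
  }

  // The explicit return type documents exactly which wrapper callers get.
  implicit def boxToOps[A](box: Box[A]): BoxOps[A] = new BoxOps(box)

  def main(args: Array[String]): Unit = {
    println(new Box(42).describe) // boxToOps wraps the Box transparently
  }
}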
