@@ -73,7 +73,7 @@ class SVD {
    * U is a row-by-row dense matrix
    * S is a simple double array of singular values
    * V is a 2d array matrix
-   * See [[denseSVD]] for more documentation
+   * See `denseSVD` for more documentation
    */
   def compute(matrix: RDD[Array[Double]]):
       (RDD[Array[Double]], Array[Double], Array[Array[Double]]) = {
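Note: the hunk above only fixes the `denseSVD` reference; the documented return shape is unchanged. A minimal usage sketch, assuming an existing SparkContext `sc`, that `SVD` is already imported, and that any required parameters (such as the target rank, which are not shown in this hunk) are already configured:

```scala
// Sketch only: `sc` is an assumed SparkContext, and `new SVD()` with default
// configuration is an assumption; the diff does not show the constructor or setters.
import org.apache.spark.rdd.RDD

val rows: RDD[Array[Double]] = sc.parallelize(Seq(
  Array(1.0, 0.0, 0.0),
  Array(0.0, 2.0, 0.0),
  Array(0.0, 0.0, 3.0)
))

// Per the doc comment: U is a row-by-row dense matrix, S is an Array[Double]
// of singular values, and V is a 2d array matrix.
val (u, s, v) = new SVD().compute(rows)
println(s.mkString(", "))
```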
pom.xml (3 changes: 1 addition & 2 deletions)
@@ -646,7 +646,6 @@
         <arg>-deprecation</arg>
       </args>
       <jvmArgs>
-        <jvmArg>-Xms64m</jvmArg>
         <jvmArg>-Xms1024m</jvmArg>
         <jvmArg>-Xmx1024m</jvmArg>
         <jvmArg>-XX:PermSize=${PermGen}</jvmArg>
@@ -689,7 +688,7 @@
           <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
           <junitxml>.</junitxml>
           <filereports>${project.build.directory}/SparkTestSuite.txt</filereports>
-          <argLine>-Xms64m -Xmx3g</argLine>
+          <argLine>-Xmx3g -XX:MaxPermSize=${MaxPermGen} -XX:ReservedCodeCacheSize=512m</argLine>
           <stderr />
         </configuration>
         <executions>
@@ -70,8 +70,8 @@ case class Exchange(newPartitioning: Partitioning, child: SparkPlan) extends Una
 }
 
 /**
- * Ensures that the [[catalyst.plans.physical.Partitioning Partitioning]] of input data meets the
- * [[catalyst.plans.physical.Distribution Distribution]] requirements for each operator by inserting
+ * Ensures that the Partitioning of input data meets the
+ * Distribution requirements for each operator by inserting
  * [[Exchange]] Operators where required.
  */
 object AddExchange extends Rule[SparkPlan] {
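Note: the rule described in the doc comment above can be illustrated with a simplified, hypothetical model. None of the types below are Catalyst's real Partitioning, Distribution, or SparkPlan classes; they are stand-ins for the idea of wrapping an under-partitioned child in an Exchange.

```scala
// Hypothetical, simplified stand-ins; not the real catalyst.plans.physical types.
sealed trait Distribution
case object UnspecifiedDistribution extends Distribution
case class ClusteredDistribution(keys: Seq[String]) extends Distribution

sealed trait Partitioning {
  def satisfies(required: Distribution): Boolean
}
case class HashPartitioning(keys: Seq[String]) extends Partitioning {
  def satisfies(required: Distribution): Boolean = required match {
    case UnspecifiedDistribution            => true
    case ClusteredDistribution(clusterKeys) => clusterKeys == keys
  }
}
case object UnknownPartitioning extends Partitioning {
  def satisfies(required: Distribution): Boolean =
    required == UnspecifiedDistribution
}

case class Plan(
    name: String,
    outputPartitioning: Partitioning,
    requiredChildDistribution: Distribution,
    children: Seq[Plan])

// Recursively fix children first, then insert an Exchange wherever a child's
// output partitioning does not meet the parent's required distribution.
def addExchange(plan: Plan): Plan = {
  val newChildren = plan.children.map(addExchange).map { child =>
    if (child.outputPartitioning.satisfies(plan.requiredChildDistribution)) {
      child
    } else {
      val repartitioned = plan.requiredChildDistribution match {
        case ClusteredDistribution(keys) => HashPartitioning(keys)
        case UnspecifiedDistribution     => child.outputPartitioning
      }
      Plan("Exchange", repartitioned, UnspecifiedDistribution, Seq(child))
    }
  }
  plan.copy(children = newChildren)
}

// Example: a join-like node that needs its scan child clustered by "k".
val scan = Plan("Scan", UnknownPartitioning, UnspecifiedDistribution, Nil)
val join = Plan("Join", UnknownPartitioning, ClusteredDistribution(Seq("k")), Seq(scan))
println(addExchange(join)) // the scan ends up wrapped in an Exchange node
```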
@@ -20,7 +20,7 @@ package org.apache.spark.sql.execution
 import org.apache.spark.sql.catalyst.expressions.{Generator, JoinedRow, Literal, Projection}
 
 /**
- * Applies a [[catalyst.expressions.Generator Generator]] to a stream of input rows, combining the
+ * Applies a Generator to a stream of input rows, combining the
  * output of each into a new stream of rows. This operation is similar to a `flatMap` in functional
  * programming with one important additional feature, which allows the input rows to be joined with
  * their output.
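Note: the "flatMap with join-back" behaviour described in the doc comment above can be illustrated with plain Scala collections (illustrative only, not the actual Generate operator): an ordinary flatMap discards the generating row, while the Generate-style version keeps each output paired with the row that produced it.

```scala
// Illustrative only: plain collections, no Spark involved.
val input = Seq(("a", Seq(1, 2)), ("b", Seq(3)))

// Ordinary flatMap: the input row is lost once its outputs are produced.
val outputsOnly: Seq[Int] = input.flatMap { case (_, generated) => generated }

// Generate-with-join: each generated value stays paired with its input row.
val joined: Seq[(String, Int)] =
  input.flatMap { case (row, generated) => generated.map(value => (row, value)) }

println(outputsOnly) // List(1, 2, 3)
println(joined)      // List((a,1), (a,2), (b,3))
```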
@@ -30,7 +30,7 @@ import org.apache.hadoop.io.Writable
 
 /**
  * Internal helper class that saves an RDD using a Hive OutputFormat.
- * It is based on [[SparkHadoopWriter]].
+ * It is based on SparkHadoopWriter.
  */
 protected[spark]
 class SparkHiveHadoopWriter(
@@ -137,7 +137,7 @@ class HiveMetastoreCatalog(hive: HiveContext) extends Catalog with Logging {
 
   /**
    * UNIMPLEMENTED: It needs to be decided how we will persist in-memory tables to the metastore.
-   * For now, if this functionality is desired mix in the in-memory [[OverrideCatalog]].
+   * For now, if this functionality is desired mix in the in-memory OverrideCatalog.
    */
   override def registerTable(
       databaseName: Option[String], tableName: String, plan: LogicalPlan): Unit = ???
@@ -122,7 +122,7 @@ object HiveQl {
 
   /**
    * A set of implicit transformations that allow Hive ASTNodes to be rewritten by transformations
-   * similar to [[catalyst.trees.TreeNode]].
+   * similar to TreeNode.
    *
    * Note that this should be considered very experimental and is not indented as a replacement
    * for TreeNode. Primarily it should be noted ASTNodes are not immutable and do not appear to
@@ -359,7 +359,7 @@ case class HiveGenericUdaf(
 
 /**
  * Converts a Hive Generic User Defined Table Generating Function (UDTF) to a
- * [[catalyst.expressions.Generator Generator]]. Note that the semantics of Generators do not allow
+ * Generator. Note that the semantics of Generators do not allow
  * Generators to maintain state in between input rows. Thus UDTFs that rely on partitioning
  * dependent operations like calls to `close()` before producing output will not operate the same as
  * in Hive. However, in practice this should not affect compatibility for most sane UDTFs