2 files changed, +5 −5 lines, both under sql/core/src/main/scala/org/apache/spark/sql
@@ -120,7 +120,7 @@ private[sql] class ParquetRelation2(
     private val maybeDataSchema: Option[StructType],
     private val maybePartitionSpec: Option[PartitionSpec],
     parameters: Map[String, String])(
-    @transient val sqlContext: SQLContext)
+    val sqlContext: SQLContext)
   extends HadoopFsRelation(maybePartitionSpec)
   with Logging {
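
The only change in this hunk is dropping the `@transient` modifier from the `sqlContext` constructor parameter, so the field is now included when a `ParquetRelation2` instance is Java-serialized instead of coming back `null` after deserialization. A minimal, runnable sketch of that `@transient` behavior; `Holder`, `roundTrip`, and `TransientDemo` are hypothetical names for illustration, not part of this PR:

```scala
import java.io._

// Hypothetical class mirroring the pattern in the hunk above: a constructor
// val marked @transient is skipped by Java serialization.
class Holder(@transient val ctx: String) extends Serializable

object TransientDemo {
  // Round-trip an object through Java serialization, the way Spark ships
  // objects from the driver to executors.
  def roundTrip[T <: Serializable](obj: T): T = {
    val bos = new ByteArrayOutputStream()
    val oos = new ObjectOutputStream(bos)
    oos.writeObject(obj)
    oos.close()
    val ois = new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray))
    ois.readObject().asInstanceOf[T]
  }

  def main(args: Array[String]): Unit = {
    val restored = roundTrip(new Holder("driver-side context"))
    // The @transient field is dropped in transit and comes back null.
    println(restored.ctx)  // prints: null
  }
}
```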

@@ -608,10 +608,10 @@ abstract class HadoopFsRelation private[sql](maybePartitionSpec: Option[PartitionSpec])
    * @since 1.4.0
    */
   def buildScan(
-    requiredColumns: Array[String],
-    filters: Array[Filter],
-    inputFiles: Array[FileStatus],
-    broadcastedConf: Broadcast[SerializableWritable[Configuration]]): RDD[Row] = {
+      requiredColumns: Array[String],
+      filters: Array[Filter],
+      inputFiles: Array[FileStatus],
+      broadcastedConf: Broadcast[SerializableWritable[Configuration]]): RDD[Row] = {
     buildScan(requiredColumns, filters, inputFiles)
   }
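
This second hunk is whitespace-only: the four parameter lines of `buildScan` are re-indented, and the behavior is unchanged. As the unchanged body shows, the four-argument overload still ignores the broadcast Hadoop configuration by default and delegates to the three-argument variant. A simplified sketch of that overload-delegation pattern, with stand-in types replacing Spark's `Filter`, `FileStatus`, `Broadcast`, `SerializableWritable`, `Configuration`, and `RDD` (illustrative only, not Spark's API):

```scala
object ScanSketch {
  // Stand-ins for the Spark types appearing in the diff (illustrative only).
  case class Filter(expr: String)
  case class FileStatus(path: String)
  case class Broadcast[T](value: T)
  type Row = Seq[Any]

  abstract class MiniRelation {
    // Narrow entry point that concrete relations typically implement.
    def buildScan(
        requiredColumns: Array[String],
        filters: Array[Filter],
        inputFiles: Array[FileStatus]): Iterator[Row]

    // Wider overload: the default implementation drops the broadcast
    // configuration and delegates, mirroring the diff above.
    def buildScan(
        requiredColumns: Array[String],
        filters: Array[Filter],
        inputFiles: Array[FileStatus],
        broadcastedConf: Broadcast[Map[String, String]]): Iterator[Row] = {
      buildScan(requiredColumns, filters, inputFiles)
    }
  }
}
```

With this shape, a concrete subclass only needs to implement the three-argument overload, while callers that do have a broadcast configuration can still go through the wider signature.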