
Commit bcc4c1a

modify limit to limit() in TaskSetManager.scala

1 parent c3dd2a2 commit bcc4c1a

1 file changed (+3, -3 lines)

core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala

Lines changed: 3 additions & 3 deletions
@@ -488,11 +488,11 @@ private[spark] class TaskSetManager(
         abort(s"$msg Exception during serialization: $e")
         throw new TaskNotSerializableException(e)
       }
-      if (serializedTask.limit > TaskSetManager.TASK_SIZE_TO_WARN_KB * 1024 &&
+      if (serializedTask.limit() > TaskSetManager.TASK_SIZE_TO_WARN_KB * 1024 &&
         !emittedTaskSizeWarning) {
         emittedTaskSizeWarning = true
         logWarning(s"Stage ${task.stageId} contains a task of very large size " +
-          s"(${serializedTask.limit / 1024} KB). The maximum recommended task size is " +
+          s"(${serializedTask.limit() / 1024} KB). The maximum recommended task size is " +
           s"${TaskSetManager.TASK_SIZE_TO_WARN_KB} KB.")
       }
       addRunningTask(taskId)

@@ -502,7 +502,7 @@ private[spark] class TaskSetManager(
       // val timeTaken = clock.getTime() - startTime
       val taskName = s"task ${info.id} in stage ${taskSet.id}"
       logInfo(s"Starting $taskName (TID $taskId, $host, executor ${info.executorId}, " +
-        s"partition ${task.partitionId}, $taskLocality, ${serializedTask.limit} bytes)")
+        s"partition ${task.partitionId}, $taskLocality, ${serializedTask.limit()} bytes)")

      sched.dagScheduler.taskStarted(task, info)
      new TaskDescription(
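For context, here is a minimal, self-contained Scala sketch of the size check this diff touches. It is an illustration, not Spark code: the constant taskSizeToWarnKb and the buffer construction are assumptions for the example; only the serializedTask.limit() call mirrors the actual change, which writes the zero-argument Java accessor ByteBuffer.limit() as an explicit method call instead of relying on Scala's allowance for dropping empty parentheses on Java methods.

import java.nio.ByteBuffer

object TaskSizeCheckSketch {
  // Illustrative stand-in for TaskSetManager.TASK_SIZE_TO_WARN_KB (assumed value).
  val taskSizeToWarnKb = 100

  def main(args: Array[String]): Unit = {
    // Stand-in for a serialized task: ByteBuffer.wrap sets limit() to the
    // array length, so this buffer reports a 200 KB payload.
    val serializedTask: ByteBuffer = ByteBuffer.wrap(new Array[Byte](200 * 1024))

    // limit() gives the serialized size in bytes; the parentheses make the
    // Java accessor call explicit, matching the style the commit adopts.
    if (serializedTask.limit() > taskSizeToWarnKb * 1024) {
      println(s"Task of very large size (${serializedTask.limit() / 1024} KB); " +
        s"maximum recommended task size is $taskSizeToWarnKb KB.")
    }
  }
}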
