File tree (2 files changed: +8 −6 lines changed)
@@ -189,7 +189,9 @@ of the most common options to set are:
189189 limited to this amount. If not set, Spark will not limit Python's memory use
190190 and it is up to the application to avoid exceeding the overhead memory space
191191 shared with other non-JVM processes. When PySpark is run in YARN or Kubernetes, this memory
192- is added to executor resource requests. This configuration is not supported on Windows.
192+ is added to executor resource requests.
193+
194+ NOTE: This configuration is not supported on Windows.
193195 </td >
194196</tr >
195197<tr >
Original file line number Diff line number Diff line change 2222import os
2323import sys
2424import time
25- # 'resource' is a Unix specific package .
26- has_resource_package = True
25+ # 'resource' is a Unix specific module .
26+ has_resource_module = True
2727try :
2828 import resource
2929except ImportError :
30- has_resource_package = False
30+ has_resource_module = False
3131import socket
3232import traceback
3333
@@ -274,8 +274,8 @@ def main(infile, outfile):
274274 # set up memory limits
275275 memory_limit_mb = int (os .environ .get ('PYSPARK_EXECUTOR_MEMORY_MB' , "-1" ))
276276 # 'PYSPARK_EXECUTOR_MEMORY_MB' should be undefined on Windows because it depends on
277- # resource package which is a Unix specific package .
278- if memory_limit_mb > 0 and has_resource_package :
277+ # resource module which is a Unix specific module .
278+ if memory_limit_mb > 0 and has_resource_module :
279279 total_memory = resource .RLIMIT_AS
280280 try :
281281 (soft_limit , hard_limit ) = resource .getrlimit (total_memory )
You can’t perform that action at this time.
0 commit comments