Github user rdblue commented on a diff in the pull request:

    https://github.com/apache/spark/pull/21977#discussion_r207722412
  
    --- Diff: python/pyspark/worker.py ---
    @@ -259,6 +260,26 @@ def main(infile, outfile):
                                  "PYSPARK_DRIVER_PYTHON are correctly set.") %
                                 ("%d.%d" % sys.version_info[:2], version))
     
    +        # set up memory limits
    +        memory_limit_mb = int(os.environ.get('PYSPARK_EXECUTOR_MEMORY_MB', "-1"))
    +        total_memory = resource.RLIMIT_AS
    +        try:
    +            (total_memory_limit, max_total_memory) = resource.getrlimit(total_memory)
    +            msg = "Current mem: {0} of max {1}\n".format(total_memory_limit, max_total_memory)
    +            sys.stderr.write(msg)
    +
    +            if memory_limit_mb > 0 and total_memory_limit == resource.RLIM_INFINITY:
    +                # convert to bytes
    +                total_memory_limit = memory_limit_mb * 1024 * 1024
    +
    +                msg = "Setting mem to {0} of max {1}\n".format(total_memory_limit, max_total_memory)
    +                sys.stderr.write(msg)
    +                resource.setrlimit(total_memory, (total_memory_limit, total_memory_limit))
    +
    +        except (resource.error, OSError) as e:
    +            # not all systems support resource limits, so warn instead of failing
    +            sys.stderr.write("WARN: Failed to set memory limit: {0}\n".format(e))
    --- End diff --
    
    Fixed.
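    For reference, a minimal standalone sketch of the same RLIMIT_AS approach shown in the diff above. The 512 MiB default and the surrounding script are illustrative only, not part of the patch:

        import os
        import resource
        import sys

        # Illustrative cap; the worker derives this from PYSPARK_EXECUTOR_MEMORY_MB.
        limit_mb = int(os.environ.get('PYSPARK_EXECUTOR_MEMORY_MB', '512'))
        limit_bytes = limit_mb * 1024 * 1024

        try:
            soft, hard = resource.getrlimit(resource.RLIMIT_AS)
            # Only set a limit if none is in place yet, as the patch does.
            if soft == resource.RLIM_INFINITY:
                resource.setrlimit(resource.RLIMIT_AS, (limit_bytes, limit_bytes))
        except (resource.error, OSError) as e:
            # Not all systems support this limit; warn instead of failing,
            # mirroring the patch.
            sys.stderr.write("WARN: Failed to set memory limit: {0}\n".format(e))

        # With the limit in place, allocations beyond the cap raise MemoryError
        # in this process, e.g.:
        # data = bytearray(1024 * 1024 * 1024)  # ~1 GiB, exceeds a 512 MiB cap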

