[ https://issues.apache.org/jira/browse/SPARK-54753?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

xihuan updated SPARK-54753:
---------------------------
    Description: 
In Apache Spark *4.0.1* local mode, memory is not released during a long-running job; after downgrading to Spark {*}3.5.6{*}, the issue does not occur.


The issue can be reproduced with a simple test case:

```
package spark;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;

public class SparkApp {
    private static final Logger log = LoggerFactory.getLogger(SparkApp.class);

    private static final String SPARK_MASTER_URL = "local[4]";
    private static final String SPARK_MEMORY = "500m";

    public static void main(String[] args) {
        log.debug("Starting application...");

        SparkSession sparkSession = SparkSession.builder()
                .appName("Test Application")
                .master(SPARK_MASTER_URL)
                .config("spark.driver.memory", SPARK_MEMORY)
                
//.config("spark.sql.sources.bucketing.autoBucketedScan.enabled", false) // To 
get rid of the memory leak
                .getOrCreate();

        processData(sparkSession);
    }

    private static void processData(SparkSession sparkSession) {
        
        while (true) {
            //load from a local csv file
            Dataset<Row> dataset = sparkSession.read().csv("c:/City.csv");
            dataset.show(5);

            log.debug("Persist dataset...");
            // This is the place where the memory leak occurs
            dataset.persist();

            // Do something...
            log.debug("Do something with the persisted dataset");

            // ...and unpersist the dataset
            log.debug("Unpersist dataset...");
            dataset.unpersist();

            log.debug("Processing data completed");
        }
    }

}

```
 
With Apache Spark 4.0.0 or 4.0.1, the loop runs into an OutOfMemoryError after roughly 9 minutes.

 !screenshot-1.png! 
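To quantify the growth while the loop runs (this instrumentation is not part of the original reproduction, just a suggestion), heap usage can be logged each iteration with the standard JDK memory MXBean; a minimal sketch:

```
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
import java.lang.management.MemoryUsage;

import org.slf4j.Logger;

// Illustration only: logs used/max heap so a leak shows up as a steadily
// growing "heapUsedMB" value across iterations.
public final class HeapLogger {
    private static final MemoryMXBean MEMORY = ManagementFactory.getMemoryMXBean();

    public static void logHeap(Logger log, long iteration) {
        MemoryUsage heap = MEMORY.getHeapMemoryUsage();
        log.info("iteration={} heapUsedMB={} heapMaxMB={}",
                iteration,
                heap.getUsed() / (1024 * 1024),
                heap.getMax() / (1024 * 1024));
    }
}
```

Calling HeapLogger.logHeap(log, i) at the end of each loop iteration should show used heap climbing on 4.0.x and staying roughly flat on 3.5.6, if the behaviour described above is reproduced.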


With Apache Spark 3.5.6, no such issue occurs.
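For reference, the workaround hinted at by the commented-out .config(...) line in the reproduction can presumably also be applied to an already-created session, since spark.sql.sources.bucketing.autoBucketedScan.enabled is a SQL conf; a sketch, assuming it is runtime-settable:

```
// Sketch only: apply the same workaround as the commented-out
// .config("spark.sql.sources.bucketing.autoBucketedScan.enabled", false)
// builder call, but on an existing SparkSession.
sparkSession.conf().set("spark.sql.sources.bucketing.autoBucketedScan.enabled", "false");
```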


 

 

 

 

       Priority: Critical  (was: Major)

> memory leak in Apache Spark 4.0.1
> ---------------------------------
>
>                 Key: SPARK-54753
>                 URL: https://issues.apache.org/jira/browse/SPARK-54753
>             Project: Spark
>          Issue Type: Bug
>          Components: Spark Core
>    Affects Versions: 4.0.1
>            Reporter: xihuan
>            Priority: Critical
>         Attachments: image-2025-12-23-05-38-53-324.png, pom.xml, 
> screenshot-1.png
>
>



--
This message was sent by Atlassian Jira
(v8.20.10#820010)
