shamim created SPARK-42127:
------------------------------

             Summary: Spark 3.3.0, Error with java.io.IOException: Mkdirs failed to create file
                 Key: SPARK-42127
                 URL: https://issues.apache.org/jira/browse/SPARK-42127
             Project: Spark
          Issue Type: Bug
          Components: Spark Core
    Affects Versions: 3.3.0
            Reporter: shamim
23/01/18 20:23:24 WARN TaskSetManager: Lost task 4.0 in stage 0.0 (TID 4) (10.64.109.72 executor 0): java.io.IOException: Mkdirs failed to create file:/var/backup/_temporary/0/_temporary/attempt_202301182023173234741341853025716_0005_m_000004_0 (exists=false, cwd=file:/opt/spark-3.3.0/work/app-20230118202317-0001/0)
	at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:515)
	at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:500)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:1195)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:1081)
	at org.apache.hadoop.mapred.TextOutputFormat.getRecordWriter(TextOutputFormat.java:113)
	at org.apache.spark.internal.io.HadoopMapRedWriteConfigUtil.initWriter(SparkHadoopWriter.scala:238)
	at org.apache.spark.internal.io.SparkHadoopWriter$.executeTask(SparkHadoopWriter.scala:126)
	at org.apache.spark.internal.io.SparkHadoopWriter$.$anonfun$write$1(SparkHadoopWriter.scala:88)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
	at org.apache.spark.scheduler.Task.run(Task.scala:136)
	at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:548)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1504)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:551)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:750)
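The report does not include the job that triggers the failure. A minimal reproduction sketch, assuming the job writes an RDD to a local file: URI with saveAsTextFile on a standalone cluster (the output path file:/var/backup comes from the error message; the record contents, object name, and partition count below are placeholders, not taken from the report):

import org.apache.spark.sql.SparkSession

// Sketch only (assumed code, not from the report): saveAsTextFile to a
// local file: path goes through SparkHadoopWriter and TextOutputFormat,
// so each executor must be able to create <path>/_temporary/... on its
// own node; otherwise the task fails with "Mkdirs failed to create file".
object Spark42127Repro {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("SPARK-42127 repro sketch")
      .getOrCreate()

    spark.sparkContext
      .parallelize(1 to 1000, numSlices = 8)
      .map(i => s"record-$i")
      .saveAsTextFile("file:/var/backup")  // local path from the error message

    spark.stop()
  }
}

The cwd in the error (file:/opt/spark-3.3.0/work/app-20230118202317-0001/0) is the executor's work directory on the worker node, so the attempt directory under /var/backup/_temporary is being created on that node, which requires the target directory to exist and be writable there.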