Github user appleyuchi commented on the issue:

    https://github.com/apache/spark/pull/18099
  
    The following error occurs when I run a job with ALS in Spark:
    
    8/08/22 21:24:14 ERROR Utils: Uncaught exception in thread stdout writer 
for python
java.lang.AssertionError: assertion failed: Block rdd_7_0 is not locked 
for reading
        at scala.Predef$.assert(Predef.scala:170)
        at 
org.apache.spark.storage.BlockInfoManager.unlock(BlockInfoManager.scala:299)
        at 
org.apache.spark.storage.BlockManager.releaseLock(BlockManager.scala:769)
        at 
org.apache.spark.storage.BlockManager$$anonfun$1.apply$mcV$sp(BlockManager.scala:540)
        at 
org.apache.spark.util.CompletionIterator$$anon$1.completion(CompletionIterator.scala:44)
        at 
org.apache.spark.util.CompletionIterator.hasNext(CompletionIterator.scala:33)
        at 
org.apache.spark.InterruptibleIterator.hasNext(InterruptibleIterator.scala:37)
        at scala.collection.Iterator$class.foreach(Iterator.scala:893)
        at 
org.apache.spark.InterruptibleIterator.foreach(InterruptibleIterator.scala:28)
        at 
org.apache.spark.api.python.PythonRDD$.writeIteratorToStream(PythonRDD.scala:213)
        at 
org.apache.spark.api.python.PythonRunner$$anon$2.writeIteratorToStream(PythonRunner.scala:407)
        at 
org.apache.spark.api.python.BasePythonRunner$WriterThread$$anonfun$run$1.apply(PythonRunner.scala:215)
        at org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1991)
        at 
org.apache.spark.api.python.BasePythonRunner$WriterThread.run(PythonRunner.scala:170)
    Exception in thread "stdout writer for python" java.lang.AssertionError: 
assertion failed: Block rdd_7_0 is not locked for reading
        at scala.Predef$.assert(Predef.scala:170)
        at 
org.apache.spark.storage.BlockInfoManager.unlock(BlockInfoManager.scala:299)
        at 
org.apache.spark.storage.BlockManager.releaseLock(BlockManager.scala:769)
        at 
org.apache.spark.storage.BlockManager$$anonfun$1.apply$mcV$sp(BlockManager.scala:540)
        at 
org.apache.spark.util.CompletionIterator$$anon$1.completion(CompletionIterator.scala:44)
        at 
org.apache.spark.util.CompletionIterator.hasNext(CompletionIterator.scala:33)
        at 
org.apache.spark.InterruptibleIterator.hasNext(InterruptibleIterator.scala:37)
        at scala.collection.Iterator$class.foreach(Iterator.scala:893)
        at 
org.apache.spark.InterruptibleIterator.foreach(InterruptibleIterator.scala:28)
        at 
org.apache.spark.api.python.PythonRDD$.writeIteratorToStream(PythonRDD.scala:213)
        at 
org.apache.spark.api.python.PythonRunner$$anon$2.writeIteratorToStream(PythonRunner.scala:407)
        at 
org.apache.spark.api.python.BasePythonRunner$WriterThread$$anonfun$run$1.apply(PythonRunner.scala:215)
        at org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1991)
        at 
org.apache.spark.api.python.BasePythonRunner$WriterThread.run(PythonRunner.scala:170)



---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to