BBency commented on issue #9094:
URL: https://github.com/apache/hudi/issues/9094#issuecomment-1621136545

   ### Approach 1 Error Stack Trace:
   23/07/04 13:33:06 ERROR UtilHelpers: Cluster failed
   org.apache.spark.SparkException: Job aborted due to stage failure: task 0.0 in stage 7.0 (TID 163) had a not serializable result: org.apache.avro.generic.GenericData$Record
   Serialization stack:
        - object not serializable (class: org.apache.avro.generic.GenericData$Record, value:
   {
       "_hoodie_commit_time": "20230704131903418",
       "_hoodie_commit_seqno": "20230704131903418_8_27177164",
       "_hoodie_record_key": "col1:1234,col2:21,col3:15,col4:1,col5:1",
       "_hoodie_partition_path": "partition_column_0=201609",
       "_hoodie_file_name": 
"9dda6486-8f7d-4cbb-ae69-fc806c659e7e-38_8-2005-0_20230704131903418.parquet",
       "Op": null,
       "TIMESTAMP": "2023-06-02 11:19:11.955233",
       "col2": 21,
       "col3": 15,
       "col1": 1234,
       "col4": 1,
       "col5": 1,
       "col6": "N",
       "col7": "S",
       "col8": 17047,
       "col9": "09:29:19",
       "col10": 2932896,
       "col11": "00:00:00",
       "col12": "N",
       "col13": -719131,
       "col14": "00:00:00",
       "col15": 1,
       "col16": 0,
       "col17": "A",
       "partition_column_0": "201609"
   }
   )
        - field (class: org.apache.hudi.common.model.RewriteAvroPayload, name: record, type: interface org.apache.avro.generic.GenericRecord)
        - object (class org.apache.hudi.common.model.RewriteAvroPayload, org.apache.hudi.common.model.RewriteAvroPayload@6122c951)
        - field (class: org.apache.hudi.common.model.HoodieRecord, name: data, type: class java.lang.Object)
        - object (class org.apache.hudi.common.model.HoodieAvroRecord, HoodieRecord{key=HoodieKey { recordKey=col1:1234,col2:21,col3:15,col4:1,col5:1 partitionPath=partition_column_0=201609}, currentLocation='null', newLocation='null'})
        at org.apache.spark.scheduler.DAGScheduler.failJobAndIndependentStages(DAGScheduler.scala:2863) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
        at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2(DAGScheduler.scala:2799) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
        at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2$adapted(DAGScheduler.scala:2798) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
        at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62) ~[scala-library-2.12.15.jar:?]
        at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55) ~[scala-library-2.12.15.jar:?]
        at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49) ~[scala-library-2.12.15.jar:?]
        at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:2798) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
        at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1(DAGScheduler.scala:1239) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
        at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1$adapted(DAGScheduler.scala:1239) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
        at scala.Option.foreach(Option.scala:407) ~[scala-library-2.12.15.jar:?]
        at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:1239) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
        at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:3051) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
        at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2993) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
        at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2982) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
        at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
        at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:1009) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
        at org.apache.spark.SparkContext.runJob(SparkContext.scala:2229) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
        at org.apache.spark.SparkContext.runJob(SparkContext.scala:2250) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
        at org.apache.spark.SparkContext.runJob(SparkContext.scala:2269) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
        at org.apache.spark.SparkContext.runJob(SparkContext.scala:2294) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
        at org.apache.spark.rdd.RDD.$anonfun$collect$1(RDD.scala:1021) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
        at org.apache.spark.rdd.RDD.withScope(RDD.scala:406) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
        at org.apache.spark.rdd.RDD.collect(RDD.scala:1020) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
        at org.apache.spark.api.java.JavaRDDLike.collect(JavaRDDLike.scala:362) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
        at org.apache.spark.api.java.JavaRDDLike.collect$(JavaRDDLike.scala:361) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
        at org.apache.spark.api.java.AbstractJavaRDDLike.collect(JavaRDDLike.scala:45) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
        at org.apache.hudi.data.HoodieJavaRDD.collectAsList(HoodieJavaRDD.java:155) ~[hudi-utilities-bundle_2.12-0.12.1.jar:0.12.1]
        at org.apache.hudi.table.action.commit.BaseCommitActionExecutor.executeClustering(BaseCommitActionExecutor.java:250) ~[hudi-utilities-bundle_2.12-0.12.1.jar:0.12.1]
        at org.apache.hudi.table.action.cluster.SparkExecuteClusteringCommitActionExecutor.execute(SparkExecuteClusteringCommitActionExecutor.java:53) ~[hudi-utilities-bundle_2.12-0.12.1.jar:0.12.1]
        at org.apache.hudi.table.HoodieSparkCopyOnWriteTable.cluster(HoodieSparkCopyOnWriteTable.java:188) ~[hudi-utilities-bundle_2.12-0.12.1.jar:0.12.1]
        at org.apache.hudi.client.SparkRDDWriteClient.cluster(SparkRDDWriteClient.java:362) ~[hudi-utilities-bundle_2.12-0.12.1.jar:0.12.1]
        at org.apache.hudi.utilities.HoodieClusteringJob.doScheduleAndCluster(HoodieClusteringJob.java:255) ~[hudi-utilities-bundle_2.12-0.12.1.jar:0.12.1]
        at org.apache.hudi.utilities.HoodieClusteringJob.lambda$cluster$0(HoodieClusteringJob.java:168) ~[hudi-utilities-bundle_2.12-0.12.1.jar:0.12.1]
        at org.apache.hudi.utilities.UtilHelpers.retry(UtilHelpers.java:559) ~[hudi-utilities-bundle_2.12-0.12.1.jar:0.12.1]
        at org.apache.hudi.utilities.HoodieClusteringJob.cluster(HoodieClusteringJob.java:155) ~[hudi-utilities-bundle_2.12-0.12.1.jar:0.12.1]
        at HudiClusteringJob$.main(eec-aws-uk-ukidcibatchanalytics-hudi-clustering-trigger.scala:40) ~[eec-aws-uk-ukidcibatchanalytics-hudi-clustering-trigger.scala.jar:?]
        at HudiClusteringJob.main(eec-aws-uk-ukidcibatchanalytics-hudi-clustering-trigger.scala) ~[eec-aws-uk-ukidcibatchanalytics-hudi-clustering-trigger.scala.jar:?]
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_372]
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_372]
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_372]
        at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_372]
        at com.amazonaws.services.glue.SparkProcessLauncherPlugin.invoke(ProcessLauncher.scala:50) ~[AWSGlueSparkResourceManager-1.0.jar:?]
        at com.amazonaws.services.glue.SparkProcessLauncherPlugin.invoke$(ProcessLauncher.scala:50) ~[AWSGlueSparkResourceManager-1.0.jar:?]
        at com.amazonaws.services.glue.ProcessLauncher$$anon$1.invoke(ProcessLauncher.scala:92) ~[AWSGlueSparkResourceManager-1.0.jar:?]
        at com.amazonaws.services.glue.ProcessLauncher.launch(ProcessLauncher.scala:163) ~[AWSGlueSparkResourceManager-1.0.jar:?]
        at com.amazonaws.services.glue.ProcessLauncher$.main(ProcessLauncher.scala:30) ~[AWSGlueSparkResourceManager-1.0.jar:?]
        at com.amazonaws.services.glue.ProcessLauncher.main(ProcessLauncher.scala) ~[AWSGlueSparkResourceManager-1.0.jar:?]
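
   The `not serializable result: org.apache.avro.generic.GenericData$Record` above is what Spark reports when the clustering result RDD is collected under the default `JavaSerializer`, since Avro's `GenericRecord` does not implement `java.io.Serializable`; Hudi's Spark jobs are documented to require Kryo serialization. A minimal sketch of a driver that sets this (the app name and structure here are placeholders, not the actual `HudiClusteringJob` class used above):

   ```scala
   import org.apache.spark.sql.SparkSession

   object HudiClusteringSketch {
     def main(args: Array[String]): Unit = {
       val spark = SparkSession.builder()
         .appName("hudi-clustering-sketch") // placeholder name
         // Hudi requires Kryo so Avro GenericRecords can be serialized between tasks
         .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
         .getOrCreate()
       // ... schedule and execute clustering here (e.g. via
       // org.apache.hudi.utilities.HoodieClusteringJob) ...
       spark.stop()
     }
   }
   ```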
        
   ### Approach 2 Error Stack Trace:
   2023-07-04 13:20:38,915 ERROR [main] glue.ProcessLauncher (Logging.scala:logError(77)): Error from Python:Traceback (most recent call last):
     File "/tmp/eec-aws-uk-ukidcibatchanalytics-hudi-clustering-job.py", line 54, in <module>
       main()
     File "/tmp/eec-aws-uk-ukidcibatchanalytics-hudi-clustering-job.py", line 47, in main
       spark_df_run_clustering = spark.sql(query_run_clustering)
     File "/opt/amazon/spark/python/lib/pyspark.zip/pyspark/sql/session.py", line 1034, in sql
       return DataFrame(self._jsparkSession.sql(sqlQuery), self)
     File "/opt/amazon/spark/python/lib/py4j-0.10.9.5-src.zip/py4j/java_gateway.py", line 1321, in __call__
       return_value = get_return_value(
     File "/opt/amazon/spark/python/lib/pyspark.zip/pyspark/sql/utils.py", line 190, in deco
       return f(*a, **kw)
     File "/opt/amazon/spark/python/lib/py4j-0.10.9.5-src.zip/py4j/protocol.py", line 326, in get_return_value
       raise Py4JJavaError(
   py4j.protocol.Py4JJavaError: An error occurred while calling o97.sql.
   : org.apache.hudi.exception.HoodieClusteringException: Clustering failed to write to files:61b699e1-9a0b-4a23-8102-f66ab5b46fc8-0,3c2c597d-f807-4c71-aede-7b7c8b95ef19-0,69751dea-eddb-4cc5-97da-507ae885512b-0,ab443ddc-bfad-49a8-aadc-be47150f3c43-0,d9e692e6-cdb1-47d9-b7dd-63e288f42c44-0,0597f0dc-f9dd-4e17-a030-098eaa70860e-0,026645e2-9c06-4804-ac60-13dd9cde28ee-0,a61b0dc0-3762-4dc0-9379-53d8f36d988c-0,82107955-04a4-43dd-b62a-3e89860c2924-0,7526b140-d4b2-4e48-8f49-0df8018feddf-0,83fb889e-bf1d-43ce-bdb9-f68b9d4ee43a-0,d989f110-fbd0-4882-ad67-ca16f1179681-0,127c70f3-ba55-4dbc-8fe9-69c14c857e10-0,0a0b5c3b-40f8-4ff3-aac2-05d575eecd3b-0,a592daf4-5b43-42e8-a85b-f82b318cb76a-0,2dadec1c-520e-4c18-9e01-c4d4de3c42e4-0,f918e9f2-4254-49ce-9026-459545075a6c-0,b4be2ac2-8238-475f-9e4c-736a778299f1-0,6ec7b26a-c5d9-43a0-82e3-487d1a440565-0,ddf44fbd-b537-463b-af28-cc4f45e9f447-0,1cfbd8bd-06c8-4b77-9d7a-52efa2dc59a0-0,19934523-c4ea-4285-acd2-b9077dd0f028-0,8efd2ea5-d96b-4010-9aa6-beeaa0d0026f-0,1b82307e-a481-4f67-abfd-271f8fc700d7-0,ad7f46dd-8ca4-4cb4-8fde-add11565c58c-0,63fd0f37-4c8c-439d-a7eb-571786c9d88c-0,a9abb2e6-7d50-4b80-baf3-4fb23b130741-0,f3a19847-e1ea-49bf-99b1-aaec2b0a21b0-0,ce89f9cf-86a6-46fd-b1b4-327fed85d8c4-0,e38269b8-8d55-4cc5-a9be-3edf156cc81a-0
        at org.apache.hudi.client.SparkRDDWriteClient.completeClustering(SparkRDDWriteClient.java:381)
        at org.apache.hudi.client.SparkRDDWriteClient.completeTableService(SparkRDDWriteClient.java:468)
        at org.apache.hudi.client.SparkRDDWriteClient.cluster(SparkRDDWriteClient.java:368)
        at org.apache.spark.sql.hudi.command.procedures.RunClusteringProcedure.$anonfun$call$9(RunClusteringProcedure.scala:115)
        at scala.collection.immutable.Stream.foreach(Stream.scala:533)
        at org.apache.spark.sql.hudi.command.procedures.RunClusteringProcedure.call(RunClusteringProcedure.scala:115)
        at org.apache.spark.sql.hudi.command.CallProcedureHoodieCommand.run(CallProcedureHoodieCommand.scala:33)
        at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:75)
        at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:73)
        at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:84)
        at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:103)
        at org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:107)
        at org.apache.spark.sql.execution.SQLExecution$.withTracker(SQLExecution.scala:224)
        at org.apache.spark.sql.execution.SQLExecution$.executeQuery$1(SQLExecution.scala:114)
        at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$7(SQLExecution.scala:139)
        at org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:107)
        at org.apache.spark.sql.execution.SQLExecution$.withTracker(SQLExecution.scala:224)
        at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:139)
        at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:245)
        at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:138)
        at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
        at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:68)
        at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:100)
        at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:96)
        at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:615)
        at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:177)
        at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:615)
        at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30)
        at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
        at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
        at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
        at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
        at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:591)
        at org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:96)
        at org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:83)
        at org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:81)
        at org.apache.spark.sql.Dataset.<init>(Dataset.scala:222)
        at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:102)
        at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
        at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:99)
        at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:622)
        at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
        at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:617)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
        at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
        at py4j.Gateway.invoke(Gateway.java:282)
        at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
        at py4j.commands.CallCommand.execute(CallCommand.java:79)
        at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:182)
        at py4j.ClientServerConnection.run(ClientServerConnection.java:106)
        at java.lang.Thread.run(Thread.java:750)
   
   2023-07-04 13:20:38,916 INFO [main] glue.ProcessLauncher (Logging.scala:logInfo(61)): postprocessing
   2023-07-04 13:20:38,917 INFO [main] glue.LogPusher (Logging.scala:logInfo(61)): stopping
   java.lang.reflect.InvocationTargetException
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at com.amazonaws.services.glue.SparkProcessLauncherPlugin.invoke(ProcessLauncher.scala:50)
        at com.amazonaws.services.glue.SparkProcessLauncherPlugin.invoke$(ProcessLauncher.scala:50)
        at com.amazonaws.services.glue.ProcessLauncher$$anon$1.invoke(ProcessLauncher.scala:92)
        at com.amazonaws.services.glue.ProcessLauncher.launch(ProcessLauncher.scala:163)
        at com.amazonaws.services.glue.ProcessLauncher$.main(ProcessLauncher.scala:30)
        at com.amazonaws.services.glue.ProcessLauncher.main(ProcessLauncher.scala)
   
   Caused by: org.apache.spark.SparkUserAppException: User application exited with 1
        at org.apache.spark.deploy.PythonRunner$.main(PythonRunner.scala:111)
        at org.apache.spark.deploy.PythonRunner.main(PythonRunner.scala)
        ... 10 more
   Continuous Logging: Shutting down cloudwatch appender.
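
   For context, Approach 2 reaches clustering through Hudi's `run_clustering` SQL procedure (the `query_run_clustering` string built in the Python job is not shown above). A hedged sketch of that call shape, with a placeholder table name rather than the actual one:

   ```scala
   // Scala equivalent of the PySpark driver's spark.sql(query_run_clustering);
   // this invokes the RunClusteringProcedure visible in the trace above.
   // 'my_table' is a placeholder, not the real table name.
   spark.sql("call run_clustering(table => 'my_table')").show(false)
   ```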

