adutra commented on code in PR #859:
URL: https://github.com/apache/polaris/pull/859#discussion_r1930145178
##########
getting-started/spark/notebooks/SparkPolaris.ipynb:
##########
@@ -539,7 +761,23 @@
"raises-exception"
]
},
- "outputs": [],
+ "outputs": [
+ {
+ "ename": "Py4JJavaError",
+ "evalue": "An error occurred while calling o90.sql.\n:
org.apache.iceberg.exceptions.ForbiddenException: Forbidden: Principal 'mlee'
with activated PrincipalRoles '[]' and activated grants via '[product_manager,
read_only]' is not authorized for op UPDATE_TABLE\n\tat
org.apache.iceberg.rest.ErrorHandlers$DefaultErrorHandler.accept(ErrorHandlers.java:212)\n\tat
org.apache.iceberg.rest.ErrorHandlers$CommitErrorHandler.accept(ErrorHandlers.java:97)\n\tat
org.apache.iceberg.rest.ErrorHandlers$CommitErrorHandler.accept(ErrorHandlers.java:80)\n\tat
org.apache.iceberg.rest.HTTPClient.throwFailure(HTTPClient.java:211)\n\tat
org.apache.iceberg.rest.HTTPClient.execute(HTTPClient.java:323)\n\tat
org.apache.iceberg.rest.HTTPClient.execute(HTTPClient.java:262)\n\tat
org.apache.iceberg.rest.HTTPClient.post(HTTPClient.java:368)\n\tat
org.apache.iceberg.rest.RESTClient.post(RESTClient.java:112)\n\tat
org.apache.iceberg.rest.RESTTableOperations.commit(RESTTableOperations.java:159)\n\tat
org.apache.iceberg.SnapshotProducer.lambda$commit$2(SnapshotProducer.java:429)\n\tat
org.apache.iceberg.util.Tasks$Builder.runTaskWithRetry(Tasks.java:413)\n\tat
org.apache.iceberg.util.Tasks$Builder.runSingleThreaded(Tasks.java:219)\n\tat
org.apache.iceberg.util.Tasks$Builder.run(Tasks.java:203)\n\tat
org.apache.iceberg.util.Tasks$Builder.run(Tasks.java:196)\n\tat
org.apache.iceberg.SnapshotProducer.commit(SnapshotProducer.java:401)\n\tat
org.apache.iceberg.spark.source.SparkWrite.commitOperation(SparkWrite.java:233)\n\tat
org.apache.iceberg.spark.source.SparkWrite$BatchAppend.commit(SparkWrite.java:301)\n\tat
org.apache.spark.sql.execution.datasources.v2.V2TableWriteExec.writeWithV2(WriteToDataSourceV2Exec.scala:399)\n\tat
org.apache.spark.sql.execution.datasources.v2.V2TableWriteExec.writeWithV2$(WriteToDataSourceV2Exec.scala:359)\n\tat
org.apache.spark.sql.execution.datasources.v2.AppendDataExec.writeWithV2(WriteToDataSourceV2Exec.scala:225)\n\tat
org.apache.spark.sql.execution.datasources.v2.V2ExistingTableWriteExec.run(WriteToDataSourceV2Exec.scala:337)\n\tat
org.apache.spark.sql.execution.datasources.v2.V2ExistingTableWriteExec.run$(WriteToDataSourceV2Exec.scala:336)\n\tat
org.apache.spark.sql.execution.datasources.v2.AppendDataExec.run(WriteToDataSourceV2Exec.scala:225)\n\tat
org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:43)\n\tat
org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:43)\n\tat
org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:49)\n\tat
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:107)\n\tat
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:125)\n\tat
org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:201)\n\tat
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:108)\n\tat
org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:900)\n\tat
org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:66)\n\tat
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:107)\n\tat
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:98)\n\tat
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:461)\n\tat
org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(origin.scala:76)\n\tat
org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:461)\n\tat
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:32)\n\tat
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)\n\tat
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)\n\tat
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:32)\n\tat
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:32)\n\tat
org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:437)\n\tat
org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:98)\n\tat
org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:85)\n\tat
org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:83)\n\tat
org.apache.spark.sql.Dataset.<init>(Dataset.scala:220)\n\tat
org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100)\n\tat
org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:900)\n\tat
org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97)\n\tat
org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:638)\n\tat
org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:900)\n\tat
org.apache.spark.sql.SparkSession.sql(SparkSession.scala:629)\n\tat
org.apache.spark.sql.SparkSession.sql(SparkSession.scala:659)\n\tat
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n\tat
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)\n\tat
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n\tat
java.base/java.lang.reflect.Method.invoke(Method.java:568)\n\tat
py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)\n\tat
py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:374)\n\tat
py4j.Gateway.invoke(Gateway.java:282)\n\tat
py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)\n\tat
py4j.commands.CallCommand.execute(CallCommand.java:79)\n\tat
py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:182)\n\tat
py4j.ClientServerConnection.run(ClientServerConnection.java:106)\n\tat
java.base/java.lang.Thread.run(Thread.java:833)\n",
Review Comment:
Whoops, I didn't mean to commit these changes 😅
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]