isMrH commented on issue #2698:
URL:
https://github.com/apache/incubator-seatunnel/issues/2698#issuecomment-1246191180
@Carl-Zhou-CN I am not sure whether this is related to this issue.
An exception occurs when a primary key or unique key exists in the `tidb` table.
```
sink {
tidb {
addr = "10.32.48.xx",
port = "4000"
database = "test"
table = "test"
user = "user "
password = "password "
source_table_name = "test"
replace = "true"
}
}
# tidb
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT '自增ID',
PRIMARY KEY (`id`),
UNIQUE KEY `uk_ip_id` (`ip_id`)
```
```
22/09/14 11:28:03 INFO yarn.Client:
client token: N/A
diagnostics: User class threw exception:
org.apache.seatunnel.core.base.exception.CommandExecuteException: Execute Spark
task error
at
org.apache.seatunnel.core.spark.command.SparkTaskExecuteCommand.execute(SparkTaskExecuteCommand.java:70)
at org.apache.seatunnel.core.base.Seatunnel.run(Seatunnel.java:40)
at
org.apache.seatunnel.core.spark.SeatunnelSpark.main(SeatunnelSpark.java:33)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at
org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2.run(ApplicationMaster.scala:673)
Caused by: com.pingcap.tikv.exception.GrpcException: retry is exhausted.
at
com.pingcap.tikv.util.ConcreteBackOffer.doBackOffWithMaxSleep(ConcreteBackOffer.java:148)
at
com.pingcap.tikv.util.ConcreteBackOffer.doBackOff(ConcreteBackOffer.java:119)
at
com.pingcap.tikv.allocator.RowIDAllocator.create(RowIDAllocator.java:95)
at
com.pingcap.tispark.write.TiBatchWriteTable.getRowIDAllocator(TiBatchWriteTable.scala:395)
at
com.pingcap.tispark.write.TiBatchWriteTable.preCalculate(TiBatchWriteTable.scala:201)
at
com.pingcap.tispark.write.TiBatchWrite$$anonfun$1.apply(TiBatchWrite.scala:203)
at
com.pingcap.tispark.write.TiBatchWrite$$anonfun$1.apply(TiBatchWrite.scala:203)
at scala.collection.immutable.List.map(List.scala:284)
at
com.pingcap.tispark.write.TiBatchWrite.doWrite(TiBatchWrite.scala:203)
at
com.pingcap.tispark.write.TiBatchWrite.com$pingcap$tispark$write$TiBatchWrite$$write(TiBatchWrite.scala:88)
at com.pingcap.tispark.write.TiBatchWrite$.write(TiBatchWrite.scala:45)
at com.pingcap.tispark.write.TiDBWriter$.write(TiDBWriter.scala:40)
at
com.pingcap.tispark.TiDBDataSource.createRelation(TiDBDataSource.scala:57)
at
org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand.run(SaveIntoDataSourceCommand.scala:45)
at
org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
at
org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
at
org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:86)
at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
at
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
at
org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:80)
at
org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:80)
at
org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:668)
at
org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:668)
at
org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:78)
at
org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:125)
at
org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:73)
at
org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:668)
at
org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:276)
at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:270)
at org.apache.seatunnel.spark.tidb.sink.Tidb.output(Tidb.scala:57)
at org.apache.seatunnel.spark.tidb.sink.Tidb.output(Tidb.scala:30)
at
org.apache.seatunnel.spark.SparkEnvironment.sinkProcess(SparkEnvironment.java:179)
at
org.apache.seatunnel.spark.batch.SparkBatchExecution.start(SparkBatchExecution.java:54)
at
org.apache.seatunnel.core.spark.command.SparkTaskExecuteCommand.execute(SparkTaskExecuteCommand.java:67)
... 7 more
Caused by: java.lang.RuntimeException: java.io.EOFException
at
com.pingcap.tikv.codec.CodecDataInput.readLong(CodecDataInput.java:158)
at
com.pingcap.tikv.allocator.RowIDAllocator.updateMeta(RowIDAllocator.java:159)
at
com.pingcap.tikv.allocator.RowIDAllocator.updateHash(RowIDAllocator.java:196)
at
com.pingcap.tikv.allocator.RowIDAllocator.udpateAllocateId(RowIDAllocator.java:230)
at
com.pingcap.tikv.allocator.RowIDAllocator.initSigned(RowIDAllocator.java:273)
at
com.pingcap.tikv.allocator.RowIDAllocator.doCreate(RowIDAllocator.java:109)
at
com.pingcap.tikv.allocator.RowIDAllocator.create(RowIDAllocator.java:90)
... 42 more
Caused by: java.io.EOFException
at java.io.DataInputStream.readFully(DataInputStream.java:197)
at java.io.DataInputStream.readLong(DataInputStream.java:416)
at
com.pingcap.tikv.codec.CodecDataInput.readLong(CodecDataInput.java:156)
... 48 more
ApplicationMaster host: hdfs-05.qd.link-x.host
ApplicationMaster RPC port: 36736
queue: root.users.root
start time: 1663125944895
final status: FAILED
tracking URL:
http://cdh-01.qd.link-x.host:8088/proxy/application_1662097049317_11464/
user: root
```
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]