JNSimba commented on code in PR #305:
URL:
https://github.com/apache/doris-spark-connector/pull/305#discussion_r2033256898
##########
spark-doris-connector/spark-doris-connector-it/src/test/java/org/apache/doris/spark/sql/DorisWriterFailoverITCase.scala:
##########
@@ -217,4 +226,28 @@ class DorisWriterFailoverITCase extends
AbstractContainerTestBase {
LOG.info("Checking DorisWriterFailoverITCase result. testName={},
actual={}, expected={}", testName, actual, expected)
assertEqualsInAnyOrder(expected.toList.asJava, actual.toList.asJava)
}
+
+ @Test
+ def testForWriteExceptionBeforeStop(): Unit = {
+ initializeTable(TABLE_WRITE_TBL_FAIL_BEFORE_STOP, DataModel.DUPLICATE)
+ val session = SparkSession.builder().master("local[1]").getOrCreate()
+ val df = session.createDataFrame(Seq(
+ ("doris", "cn"),
+ ("spark", "us"),
+ ("catalog", "uk")
+ )).toDF("name", "address")
+ thrown.expect(classOf[SparkException])
+ thrown.expectMessage("Only unique key merge on write support partial
update")
+ df.write.format("doris")
+ .option("table.identifier", DATABASE + "." +
TABLE_WRITE_TBL_FAIL_BEFORE_STOP)
+ .option("fenodes", getFenodes)
+ .option("user", getDorisUsername)
+ .option("password", getDorisPassword)
+ .option("doris.sink.properties.partial_columns", "true")
+ .option("doris.sink.net.buffer.size", "1")
+ .mode("append")
+ .save()
+ session.stop()
Review Comment:
If the itcase fails, the `stop` call needs to be placed in a `finally` block;
otherwise the session will not be stopped, and the next itcase will reuse the
stale session and may report an error.
##########
spark-doris-connector/spark-doris-connector-it/src/test/java/org/apache/doris/spark/sql/DorisWriterFailoverITCase.scala:
##########
@@ -217,4 +226,28 @@ class DorisWriterFailoverITCase extends
AbstractContainerTestBase {
LOG.info("Checking DorisWriterFailoverITCase result. testName={},
actual={}, expected={}", testName, actual, expected)
assertEqualsInAnyOrder(expected.toList.asJava, actual.toList.asJava)
}
+
+ @Test
+ def testForWriteExceptionBeforeStop(): Unit = {
+ initializeTable(TABLE_WRITE_TBL_FAIL_BEFORE_STOP, DataModel.DUPLICATE)
+ val session = SparkSession.builder().master("local[1]").getOrCreate()
+ val df = session.createDataFrame(Seq(
+ ("doris", "cn"),
+ ("spark", "us"),
+ ("catalog", "uk")
+ )).toDF("name", "address")
+ thrown.expect(classOf[SparkException])
+ thrown.expectMessage("Only unique key merge on write support partial
update")
+ df.write.format("doris")
+ .option("table.identifier", DATABASE + "." +
TABLE_WRITE_TBL_FAIL_BEFORE_STOP)
+ .option("fenodes", getFenodes)
+ .option("user", getDorisUsername)
+ .option("password", getDorisPassword)
+ .option("doris.sink.properties.partial_columns", "true")
+ .option("doris.sink.net.buffer.size", "1")
+ .mode("append")
+ .save()
+ session.stop()
Review Comment:
If the itcase fails, the `stop` call needs to be placed in a `finally` block;
otherwise the session will not be stopped, and the next itcase will reuse the
stale session and may report an error.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]