This is an automated email from the ASF dual-hosted git repository.
diwu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris-spark-connector.git
The following commit(s) were added to refs/heads/master by this push:
new 71af841 fix error when reading with not in clause (#134)
71af841 is described below
commit 71af841c7536d843e9d7babaf69e4b95298a9502
Author: gnehil <[email protected]>
AuthorDate: Fri Aug 25 16:55:06 2023 +0800
fix error when reading with not in clause (#134)
---
.../scala/org/apache/doris/spark/sql/DorisRelation.scala | 12 +++++-------
.../src/main/scala/org/apache/doris/spark/sql/Utils.scala | 6 ++++++
2 files changed, 11 insertions(+), 7 deletions(-)
diff --git a/spark-doris-connector/src/main/scala/org/apache/doris/spark/sql/DorisRelation.scala b/spark-doris-connector/src/main/scala/org/apache/doris/spark/sql/DorisRelation.scala
index 4c9d348..049d5a2 100644
--- a/spark-doris-connector/src/main/scala/org/apache/doris/spark/sql/DorisRelation.scala
+++ b/spark-doris-connector/src/main/scala/org/apache/doris/spark/sql/DorisRelation.scala
@@ -17,19 +17,18 @@
package org.apache.doris.spark.sql
-import scala.collection.JavaConverters._
-import scala.collection.mutable
-import scala.math.min
-
import org.apache.doris.spark.cfg.ConfigurationOptions._
import org.apache.doris.spark.cfg.{ConfigurationOptions, SparkSettings}
-
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.jdbc.JdbcDialects
import org.apache.spark.sql.sources._
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.{DataFrame, Row, SQLContext}
+import scala.collection.JavaConverters._
+import scala.collection.mutable
+import scala.math.min
+
private[sql] class DorisRelation(
val sqlContext: SQLContext, parameters: Map[String, String])
@@ -81,8 +80,7 @@ private[sql] class DorisRelation(
}
if (filters != null && filters.length > 0) {
-      val dorisFilterQuery = cfg.getProperty(ConfigurationOptions.DORIS_FILTER_QUERY, "1=1")
-      paramWithScan += (ConfigurationOptions.DORIS_FILTER_QUERY -> (dorisFilterQuery + " and " + filterWhereClause))
+      paramWithScan += (ConfigurationOptions.DORIS_FILTER_QUERY -> filterWhereClause)
}
     new ScalaDorisRowRDD(sqlContext.sparkContext, paramWithScan.toMap, lazySchema)
diff --git a/spark-doris-connector/src/main/scala/org/apache/doris/spark/sql/Utils.scala b/spark-doris-connector/src/main/scala/org/apache/doris/spark/sql/Utils.scala
index 2b9c3c1..54976a7 100644
--- a/spark-doris-connector/src/main/scala/org/apache/doris/spark/sql/Utils.scala
+++ b/spark-doris-connector/src/main/scala/org/apache/doris/spark/sql/Utils.scala
@@ -59,6 +59,12 @@ private[spark] object Utils {
} else {
s"${quote(attribute)} in (${compileValue(values)})"
}
+ case Not(In(attribute, values)) =>
+ if (values.isEmpty || values.length >= inValueLengthLimit) {
+ null
+ } else {
+ s"${quote(attribute)} not in (${compileValue(values)})"
+ }
case IsNull(attribute) => s"${quote(attribute)} is null"
case IsNotNull(attribute) => s"${quote(attribute)} is not null"
case And(left, right) =>
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]