This is an automated email from the ASF dual-hosted git repository.

fanjia pushed a commit to branch dev
in repository https://gitbox.apache.org/repos/asf/incubator-seatunnel.git


The following commit(s) were added to refs/heads/dev by this push:
     new 990ee8957 [Feature][Transform] data quality for null data rate (#1978)
990ee8957 is described below

commit 990ee8957dec5ce081e0b5ffd5e7d587c1d3bff0
Author: jun <[email protected]>
AuthorDate: Thu Jun 23 16:37:12 2022 +0800

    [Feature][Transform] data quality for null data rate (#1978)
---
 docs/en/transform/nullRate.md                      | 67 +++++++++++++++
 seatunnel-core/seatunnel-core-spark/pom.xml        |  6 ++
 .../seatunnel-transforms-spark/pom.xml             |  1 +
 .../pom.xml                                        | 34 +++++---
 .../org.apache.seatunnel.spark.BaseSparkTransform  | 18 ++++
 .../seatunnel/spark/transform/NullRate.scala       | 95 ++++++++++++++++++++++
 .../seatunnel/spark/transform/NullRateConfig.scala | 27 ++++++
 7 files changed, 238 insertions(+), 10 deletions(-)

diff --git a/docs/en/transform/nullRate.md b/docs/en/transform/nullRate.md
new file mode 100644
index 000000000..3f4790f6a
--- /dev/null
+++ b/docs/en/transform/nullRate.md
@@ -0,0 +1,67 @@
+# NullRate
+
+## Description
+
+When data volumes are large, null values can significantly distort the final result, so detecting them early is particularly important. This transform computes the null rate of selected fields for exactly that purpose.
+
+:::tip
+
+This transform is **ONLY** supported by Spark.
+
+:::
+
+## Options
+
+| name                     | type         | required | default value |
+| -------------------------| ------------ | -------- | ------------- |
+| fields                   | string_list  | yes      | -             |
+| rates                    | double_list  | yes      | -             |
+| throw_exception_enable   | boolean      | no       | -             |
+| save_to_table_name       | string       | no       | -             |
+
+
+
+### fields [string_list]
+
+The fields whose null rate you want to monitor.
+
+### rates [double_list]
+
+A list of doubles with the same length as `fields`; each entry is the maximum acceptable null rate (in percent) for the corresponding field.
+
+### throw_exception_enable [boolean]
+
+Whether to throw an exception when a field's null rate is greater than its configured rate. The default value is false.
+
+### save_to_table_name [string]
+
+The name of the temporary table to which the verification result is saved. The result is not output by default.
+
+### common options [string]
+
+Transform plugin common parameters, please refer to [Transform 
Plugin](common-options.mdx) for details
+
+## Examples
+
+```bash
+  NullRate {
+     fields = ["msg", "name"]
+     rates = [10.0,3.45]
+     save_to_table_name = "tmp"
+     throw_exception_enable = true
+  }
+```
+
+Use `NullRate` in transform's Dataset.
+
+```bash
+  transform {
+    NullRate {
+      fields = ["msg", "name"]
+      rates = [10.0,3.45]
+      save_to_table_name = "tmp"
+      throw_exception_enable = true
+    }
+  }
+```
diff --git a/seatunnel-core/seatunnel-core-spark/pom.xml 
b/seatunnel-core/seatunnel-core-spark/pom.xml
index 86410e077..33dbe2e4a 100644
--- a/seatunnel-core/seatunnel-core-spark/pom.xml
+++ b/seatunnel-core/seatunnel-core-spark/pom.xml
@@ -80,6 +80,12 @@
             <artifactId>seatunnel-transform-spark-uuid</artifactId>
             <version>${project.version}</version>
         </dependency>
+
+        <dependency>
+            <groupId>org.apache.seatunnel</groupId>
+            <artifactId>seatunnel-transform-spark-null-rate</artifactId>
+            <version>${project.version}</version>
+        </dependency>
     </dependencies>
 
     <build>
diff --git a/seatunnel-transforms/seatunnel-transforms-spark/pom.xml 
b/seatunnel-transforms/seatunnel-transforms-spark/pom.xml
index e95a4ebca..7e1740c3b 100644
--- a/seatunnel-transforms/seatunnel-transforms-spark/pom.xml
+++ b/seatunnel-transforms/seatunnel-transforms-spark/pom.xml
@@ -36,6 +36,7 @@
         <module>seatunnel-transform-spark-replace</module>
         <module>seatunnel-transform-spark-uuid</module>
         <module>seatunnel-transform-spark-sql</module>
+        <module>seatunnel-transform-spark-null-rate</module>
     </modules>
 
 </project>
diff --git a/seatunnel-transforms/seatunnel-transforms-spark/pom.xml 
b/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-null-rate/pom.xml
similarity index 59%
copy from seatunnel-transforms/seatunnel-transforms-spark/pom.xml
copy to 
seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-null-rate/pom.xml
index e95a4ebca..a79940eb5 100644
--- a/seatunnel-transforms/seatunnel-transforms-spark/pom.xml
+++ 
b/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-null-rate/pom.xml
@@ -22,20 +22,34 @@
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
     <parent>
         <groupId>org.apache.seatunnel</groupId>
-        <artifactId>seatunnel-transforms</artifactId>
+        <artifactId>seatunnel-transforms-spark</artifactId>
         <version>${revision}</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
 
-    <artifactId>seatunnel-transforms-spark</artifactId>
-    <packaging>pom</packaging>
+    <artifactId>seatunnel-transform-spark-null-rate</artifactId>
 
-    <modules>
-        <module>seatunnel-transform-spark-json</module>
-        <module>seatunnel-transform-spark-split</module>
-        <module>seatunnel-transform-spark-replace</module>
-        <module>seatunnel-transform-spark-uuid</module>
-        <module>seatunnel-transform-spark-sql</module>
-    </modules>
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.seatunnel</groupId>
+            <artifactId>seatunnel-api-spark</artifactId>
+            <version>${project.version}</version>
+            <scope>provided</scope>
+        </dependency>
 
+        <dependency>
+            <groupId>org.apache.spark</groupId>
+            <artifactId>spark-core_${scala.binary.version}</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.spark</groupId>
+            <artifactId>spark-sql_${scala.binary.version}</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+        </dependency>
+    </dependencies>
 </project>
diff --git 
a/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-null-rate/src/main/resources/META-INF/services/org.apache.seatunnel.spark.BaseSparkTransform
 
b/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-null-rate/src/main/resources/META-INF/services/org.apache.seatunnel.spark.BaseSparkTransform
new file mode 100644
index 000000000..f69c99f67
--- /dev/null
+++ 
b/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-null-rate/src/main/resources/META-INF/services/org.apache.seatunnel.spark.BaseSparkTransform
@@ -0,0 +1,18 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+org.apache.seatunnel.spark.transform.NullRate
\ No newline at end of file
diff --git 
a/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-null-rate/src/main/scala/org/apache/seatunnel/spark/transform/NullRate.scala
 
b/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-null-rate/src/main/scala/org/apache/seatunnel/spark/transform/NullRate.scala
new file mode 100644
index 000000000..5cea103f6
--- /dev/null
+++ 
b/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-null-rate/src/main/scala/org/apache/seatunnel/spark/transform/NullRate.scala
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.spark.transform
+
+import org.apache.seatunnel.apis.base.plugin.Plugin
+import org.apache.seatunnel.common.config.CheckConfigUtil._
+import org.apache.seatunnel.common.config.CheckResult
+import org.apache.seatunnel.spark.{BaseSparkTransform, SparkEnvironment}
+import org.apache.spark.sql.catalyst.encoders.RowEncoder
+import org.apache.spark.sql.types.{DataTypes, StructType}
+import org.apache.spark.sql.{Dataset, Row}
+
+import scala.collection.JavaConversions._
+
class NullRate extends BaseSparkTransform {

  /**
   * Computes the null-value rate of each configured field and, depending on the
   * configuration, throws when a rate exceeds its threshold and/or publishes the
   * measured rates as a temp view. The input dataset is returned unchanged.
   */
  override def process(df: Dataset[Row], env: SparkEnvironment): Dataset[Row] = {

    val allCount = env.getSparkSession.sparkContext.longAccumulator("allCount")
    // Keep only the configured fields that actually exist in the dataframe schema.
    val fieldsAndRates = config.getStringList(NullRateConfig.FIELDS)
      .zip(config.getDoubleList(NullRateConfig.RATES))
      .filter(fl => df.schema.names.contains(fl._1))
      .toMap
    // One accumulator per monitored field, counting that field's null occurrences.
    val fieldsAndRatesAccumulator = fieldsAndRates.map(fl => {
      fl._1 -> env.getSparkSession.sparkContext.longAccumulator(fl._1)
    })

    df.foreachPartition(iter => {
      while (iter.hasNext) {
        allCount.add(1L)
        val row = iter.next()
        fieldsAndRates.keys.foreach(field => {
          // Only nulls are counted; the original also added 0 for non-nulls, a no-op.
          if (row.get(row.fieldIndex(field)) == null) {
            fieldsAndRatesAccumulator(field).add(1L)
          }
        })
      }
    })

    // Guard against division by zero: an empty dataset has a 0% null rate instead of
    // NaN, which would have silently disabled the threshold check below (NaN > x is false).
    val allCountValue = allCount.value.toDouble
    val nullRateValue = fieldsAndRatesAccumulator.map(fl => {
      val percent = if (allCountValue == 0d) 0d else (fl._2.value / allCountValue) * 100d
      (fl._1, fieldsAndRates.getOrElse(fl._1, 100.00d), fl._2.value, percent)
    })

    if (config.hasPath(NullRateConfig.IS_THROWEXCEPTION) && config.getBoolean(NullRateConfig.IS_THROWEXCEPTION)) {
      nullRateValue.foreach(fv => {
        if (fv._4 > fv._2) {
          // BUGFIX: message typo "is lager then" corrected to "is larger than".
          throw new RuntimeException(s"the field(${fv._1}) null rate(${fv._4}) is larger than the setting(${fv._2})")
        }
      })
    }

    if (config.hasPath(NullRateConfig.SAVE_TO_TABLE_NAME)) {
      val nullRateRows = nullRateValue.map(fv => {
        Row(fv._1, fv._2, fv._3, fv._4)
      }).toSeq

      val schema = new StructType()
        .add("field_name", DataTypes.StringType)
        .add("setting_rate", DataTypes.DoubleType)
        .add("null_count", DataTypes.LongType)
        .add("rate_percent", DataTypes.DoubleType)
      env.getSparkSession.createDataset(nullRateRows)(RowEncoder(schema))
        .createOrReplaceTempView(config.getString(NullRateConfig.SAVE_TO_TABLE_NAME))
    }

    df
  }

  /**
   * Validates the configuration: `fields` and `rates` are present, have equal length,
   * and the field names are unique.
   */
  override def checkConfig(): CheckResult = {
    val exists = checkAllExists(config, NullRateConfig.FIELDS, NullRateConfig.RATES)
    // BUGFIX: rates is a double list (see `getDoubleList` in process and doc example
    // value 3.45); the original getIntList would reject or truncate valid configs.
    val equal =
      if (config.getStringList(NullRateConfig.FIELDS).size() == config.getDoubleList(NullRateConfig.RATES).size()) CheckResult.success()
      else CheckResult.error(s"the ${NullRateConfig.FIELDS} length is not equal ${NullRateConfig.RATES} length")
    val unique =
      if (config.getStringList(NullRateConfig.FIELDS).toList.distinct.size == config.getStringList(NullRateConfig.FIELDS).size()) CheckResult.success()
      else CheckResult.error(s"the ${NullRateConfig.FIELDS} is not unique")
    mergeCheckResults(exists, equal, unique)
  }

  override def getPluginName: String = "NullRate"
}
diff --git 
a/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-null-rate/src/main/scala/org/apache/seatunnel/spark/transform/NullRateConfig.scala
 
b/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-null-rate/src/main/scala/org/apache/seatunnel/spark/transform/NullRateConfig.scala
new file mode 100644
index 000000000..ba04585c5
--- /dev/null
+++ 
b/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-null-rate/src/main/scala/org/apache/seatunnel/spark/transform/NullRateConfig.scala
@@ -0,0 +1,27 @@
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.spark.transform
+
/** Option keys recognized by the NullRate transform. */
object NullRateConfig {
  /** String-list option: names of the columns to monitor. */
  final val FIELDS: String = "fields"
  /** Double-list option: per-field null-rate thresholds, parallel to [[FIELDS]]. */
  final val RATES: String = "rates"
  /** Boolean option: throw an exception when a threshold is exceeded. */
  final val IS_THROWEXCEPTION: String = "throw_exception_enable"
  /** String option: temp-view name that receives the measured rates. */
  final val SAVE_TO_TABLE_NAME: String = "save_to_table_name"
}

Reply via email to