LuciferYang commented on code in PR #38171:
URL: https://github.com/apache/spark/pull/38171#discussion_r993391935


##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/regexpExpressionsJoni.scala:
##########
@@ -0,0 +1,471 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.expressions
+
+import java.util.Locale
+
+import scala.collection.JavaConverters._
+
+import org.apache.commons.text.StringEscapeUtils
+import org.jcodings.specific.UTF8Encoding
+import org.joni.{Option, Regex, Syntax}
+
+import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.expressions.codegen._
+import org.apache.spark.sql.catalyst.expressions.codegen.Block._
+import org.apache.spark.sql.catalyst.trees.TreePattern.{LIKE_FAMLIY, 
TreePattern}
+import org.apache.spark.sql.catalyst.util.StringUtils
+import org.apache.spark.sql.types._
+import org.apache.spark.unsafe.types.UTF8String
+
+
+abstract class StringRegexExpressionJoni extends BinaryExpression
+  with ImplicitCastInputTypes with NullIntolerant with Predicate {
+
+  def escape(v: Array[Byte]): Array[Byte]
+  def matches(regex: Regex, str: Array[Byte]): Boolean
+
+  override def dataType: DataType = BooleanType
+  override def inputTypes: Seq[DataType] = Seq(StringType, StringType)
+
+  // try cache foldable pattern
+  private lazy val cache: Regex = right match {
+    case p: Expression if p.foldable =>
+      compile(p.eval().asInstanceOf[UTF8String].getBytes)
+    case _ => null
+  }
+
+  protected def compile(pattern: Array[Byte]): Regex = if (pattern == null) {
+    null
+  } else {
+    // Let it raise exception if couldn't compile the regex string
+    val escapedPattern = escape(pattern)
+    new Regex(escapedPattern, 0, escapedPattern.length,
+      Option.NONE, UTF8Encoding.INSTANCE, Syntax.Java)
+  }
+
+  protected def pattern(pattern: Array[Byte]) = if (cache == null) 
compile(pattern) else cache
+
+  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
+    val regex = pattern(input2.asInstanceOf[UTF8String].getBytes)
+    if(regex == null) {
+      null
+    } else {
+      matches(regex, input1.asInstanceOf[UTF8String].getBytes)
+    }
+  }
+
+  override def sql: String = s"${left.sql} 
${prettyName.toUpperCase(Locale.ROOT)} ${right.sql}"
+}
+
+// scalastyle:off line.contains.tab
+/**
+ * Simple RegEx pattern matching function
+ */
+@ExpressionDescription(
+  usage = "str _FUNC_ pattern[ ESCAPE escape] - Returns true if str matches 
`pattern` with " +
+    "`escape`, null if any arguments are null, false otherwise.",
+  arguments = """
+    Arguments:
+      * str - a string expression
+      * pattern - a string expression. The pattern is a string which is 
matched literally, with
+          exception to the following special symbols:<br><br>
+            _ matches any one character in the input (similar to . in posix 
regular expressions)
+          % matches zero or more characters in the input (similar to .* in 
posix regular
+          expressions)<br><br>
+          Since Spark 2.0, string literals are unescaped in our SQL parser. 
For example, in order
+          to match "\abc", the pattern should be "\\abc".
+          When SQL config 'spark.sql.parser.escapedStringLiterals' is enabled, 
it fallbacks
+          to Spark 1.6 behavior regarding string literal parsing. For example, 
if the config is
+          enabled, the pattern to match "\abc" should be "\abc".
+      * escape - a character added since Spark 3.0. The default escape 
character is the '\'.
+          If an escape character precedes a special symbol or another escape 
character, the
+          following character is matched literally. It is invalid to escape 
any other character.
+  """,
+  examples = """
+    Examples:
+      > SELECT _FUNC_('Spark', '_park');
+      true
+      > SET spark.sql.parser.escapedStringLiterals=true;
+      spark.sql.parser.escapedStringLiterals   true
+      > SELECT '%SystemDrive%\Users\John' _FUNC_ '\%SystemDrive\%\\Users%';
+      true
+      > SET spark.sql.parser.escapedStringLiterals=false;
+      spark.sql.parser.escapedStringLiterals   false
+      > SELECT '%SystemDrive%\\Users\\John' _FUNC_ '\%SystemDrive\%\\\\Users%';
+      true
+      > SELECT '%SystemDrive%/Users/John' _FUNC_ '/%SystemDrive/%//Users%' 
ESCAPE '/';
+      true
+  """,
+  note = """
+    Use RLIKE to match with standard regular expressions.
+  """,
+  since = "1.0.0")

Review Comment:
   since = "3.4.0" ? 



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to