MaxGekk commented on code in PR #38344:
URL: https://github.com/apache/spark/pull/38344#discussion_r1004862763


##########
core/src/main/resources/error/error-classes.json:
##########
@@ -644,6 +669,66 @@
     ],
     "sqlState" : "42000"
   },
+  "PROTOBUF_CLASS_LOAD_ERROR" : {
+    "message" : [
+      "Could not load Protobuf class with name <protobufClassName> 
<errorMessage>"
+    ]
+  },
+  "PROTOBUF_DEPENDENCY_ERROR" : {
+    "message" : [
+      "Could not find dependency: <dependencyName>"
+    ]
+  },
+  "PROTOBUF_DESCRIPTOR_ERROR" : {
+    "message" : [
+      "Error parsing descriptor byte[] into Descriptor object Error: 
<errorMessage>"
+    ]
+  },
+  "PROTOBUF_DESCRIPTOR_PARSING_ERROR" : {
+    "message" : [
+      "Error constructing FileDescriptor, Error: <errorMessage>"

Review Comment:
   Could you avoid passing an arbitrary `errorMessage`? In the future,
`error-classes.json` might be translated into a local language, and an error message
in mixed languages won't look nice.



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala:
##########
@@ -3210,4 +3211,189 @@ private[sql] object QueryCompilationErrors extends 
QueryErrorsBase {
       messageParameters = Map("expression" -> toSQLExpr(expression))
     )
   }
+
+  def cannotConvertProtobufTypeToSqlTypeError(
+    protobufColumn: String,
+    sqlColumn: String,
+    protobufType: String,
+    sqlType: DataType): Throwable = {
+    new AnalysisException(
+      errorClass = "PROTOBUF_TYPE_TO_SQL_TYPE_ERROR",
+      messageParameters = Map(
+        "protobufColumn" -> protobufColumn,
+        "sqlColumn" -> sqlColumn,

Review Comment:
   It is an identifier, so quote it using `toSQLId()`.



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala:
##########
@@ -3210,4 +3211,189 @@ private[sql] object QueryCompilationErrors extends 
QueryErrorsBase {
       messageParameters = Map("expression" -> toSQLExpr(expression))
     )
   }
+
+  def cannotConvertProtobufTypeToSqlTypeError(
+    protobufColumn: String,
+    sqlColumn: String,
+    protobufType: String,
+    sqlType: DataType): Throwable = {

Review Comment:
   Fix indentation, see 
https://github.com/databricks/scala-style-guide#spacing-and-indentation



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala:
##########
@@ -3210,4 +3211,189 @@ private[sql] object QueryCompilationErrors extends 
QueryErrorsBase {
       messageParameters = Map("expression" -> toSQLExpr(expression))
     )
   }
+
+  def cannotConvertProtobufTypeToSqlTypeError(
+    protobufColumn: String,
+    sqlColumn: String,
+    protobufType: String,
+    sqlType: DataType): Throwable = {
+    new AnalysisException(
+      errorClass = "PROTOBUF_TYPE_TO_SQL_TYPE_ERROR",
+      messageParameters = Map(
+        "protobufColumn" -> protobufColumn,
+        "sqlColumn" -> sqlColumn,
+        "protobufType" -> protobufType,
+        "sqlType" -> toSQLType(sqlType)))
+  }
+
+  def cannotConvertCatalystTypeToProtobufTypeError(
+    sqlColumn: String,
+    protobufColumn: String,
+    sqlType: DataType,
+    protobufType: String): Throwable = {
+    new AnalysisException(
+      errorClass = "SQL_TYPE_TO_PROTOBUF_TYPE_ERROR",
+      messageParameters = Map(
+        "sqlColumn" -> sqlColumn,
+        "protobufColumn" -> protobufColumn,
+        "sqlType" -> toSQLType(sqlType),
+        "protobufType" -> protobufType))
+  }
+
+  def cannotConvertCatalystTypeToProtobufEnumTypeError(
+    sqlColumn: String,
+    protobufColumn: String,
+    data: String,
+    enumString: String): Throwable = {
+    new AnalysisException(
+      errorClass = "CATALYST_TYPE_TO_PROTOBUF_ENUM_TYPE_ERROR",
+      messageParameters = Map(
+        "sqlColumn" -> sqlColumn,
+        "protobufColumn" -> protobufColumn,
+        "data" -> data,
+        "enumString" -> enumString))
+  }
+
+  def cannotConvertProtobufTypeToCatalystTypeError(
+    protobufType: String,
+    sqlType: DataType,
+    e1: Throwable): Throwable = {
+    new AnalysisException(
+      errorClass = "PROTOBUF_TYPE_TO_CATALYST_TYPE_ERROR",
+      messageParameters = Map(
+        "protobufType" -> protobufType,
+        "toType" -> toSQLType(sqlType)),
+      cause = Some(e1.getCause))
+  }
+
+  def cannotConvertSqlTypeToProtobufError(
+    protobufType: String,
+    sqlType: DataType,
+    e1: Throwable): Throwable = {
+    new AnalysisException(
+      errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_TYPE",
+      messageParameters = Map(
+        "protobufType" -> protobufType,
+        "toType" -> toSQLType(sqlType)),
+      cause = Some(e1.getCause))
+  }
+
+  def protobufTypeUnsupportedYetError(protobufType: String): Throwable = {
+    new AnalysisException(
+      errorClass = "PROTOBUF_TYPE_NOT_SUPPORT_ERROR",
+      messageParameters = Map("protobufType" -> protobufType))
+  }
+
+  def unknownProtobufMessageTypeError(
+    descriptorName: String,
+    containingType: String): Throwable = {
+    new AnalysisException(
+      errorClass = "UNKNOWN_PROTOBUF_MESSAGE_TYPE",
+      messageParameters = Map(
+        "descriptorName" -> descriptorName,
+        "containingType" -> containingType))
+  }
+
+  def cannotFindCatalystTypeInProtobufSchemaError(catalystFieldPath: String): 
Throwable = {
+    new AnalysisException(
+      errorClass = "NO_CATALYST_TYPE_IN_PROTOBUF_SCHEMA",
+      messageParameters = Map("catalystFieldPath" -> catalystFieldPath))
+  }
+
+  def cannotFindProtobufFieldInCatalystError(field: String): Throwable = {
+    new AnalysisException(
+      errorClass = "PROTOBUF_FIELD_MISSING_IN_CATALYST_SCHEMA",
+      messageParameters = Map("field" -> field))
+  }
+
+  def protobufFieldMatchError(
+    field: String,
+    protobufSchema: String,
+    matchSize: String,
+    matches: String): Throwable = {
+    new AnalysisException(
+      errorClass = "PROTOBUF_FIELD_MISSING_ERROR",
+      messageParameters = Map(
+        "field" -> field,
+        "protobufSchema" -> protobufSchema,
+        "matchSize" -> matchSize,
+        "matches" -> matches))
+  }
+
+  def unableToLocateProtobufMessageError(messageName: String): Throwable = {
+    new AnalysisException(
+      errorClass = "UNABLE_TO_LOCATE_PROTOBUF_MESSAGE_ERROR",
+      messageParameters = Map("messageName" -> messageName))
+  }
+
+  def descrioptorParseError(e1: Throwable): Throwable = {
+    new AnalysisException(
+      errorClass = "PROTOBUF_DESCRIPTOR_ERROR",
+      messageParameters = Map("errorMessage" -> e1.getMessage()),
+      cause = Some(e1.getCause))
+  }
+
+  def cannotFindDescriptorFileError(filePath: String, e1: Throwable): 
Throwable = {
+    new AnalysisException(
+      errorClass = "CANNOT_FIND_PROTOBUF_DESCRIPTOR_FILE_ERROR",
+      messageParameters = Map("filePath" -> filePath),
+      cause = Some(e1.getCause))
+  }
+
+  def noProtobufMessageTypeReturnError(descriptorName: String): Throwable = {
+    new AnalysisException(
+      errorClass = "NO_PROTOBUF_MESSAGE_TYPE_ERROR",
+      messageParameters = Map("descriptorName" -> descriptorName))
+  }
+
+  def failedParsingDescriptorError(e1: Throwable): Throwable = {
+    new AnalysisException(
+      errorClass = "PROTOBUF_DESCRIPTOR_PARSING_ERROR",
+      messageParameters = Map("errorMessage" -> e1.getMessage()),
+      cause = Some(e1.getCause))
+  }
+
+  def foundRecursionInProtobufSchema(fieldDescriptor: String): Throwable = {
+    new AnalysisException(
+      errorClass = "PROTOBUF_RECURSION_ERROR",
+      messageParameters = Map("fieldDescriptor" -> fieldDescriptor))
+  }
+
+  def protobufFieldTypeMismatchError(field: String): Throwable = {
+    new AnalysisException(
+      errorClass = "PROTOBUF_FIELD_TYPE_MISMATCH",
+      messageParameters = Map("field" -> field))
+  }
+
+  def protobufClassLoadError(protobufClassName: String, errorMessage: String): 
Throwable = {
+    new AnalysisException(
+      errorClass = "PROTOBUF_CLASS_LOAD_ERROR",
+      messageParameters = Map(
+        "protobufClassName" -> protobufClassName,
+        "errorMessage" -> errorMessage))
+  }
+
+  def protobufMessageTypeError(protobufClassName: String): Throwable = {
+    new AnalysisException(
+      errorClass = "PROTOBUF_MESSAGE_TYPE_ERROR",
+      messageParameters = Map("protobufClassName" -> protobufClassName))
+  }
+
+  def protobufDescriptorDependencyError(dependencyName: String): Throwable = {
+    new AnalysisException(
+      errorClass = "PROTOBUF_DEPENDENCY_ERROR",
+      messageParameters = Map("dependencyName" -> dependencyName))
+  }
+
+  def invalidByteStringFormatError(): Throwable = {
+    new AnalysisException(errorClass = "INVALID_BYTE_STRING_ERROR", 
messageParameters = Map.empty)
+  }
+
+  def malformedRecordsDetectedInRecordParsingError(e1: Throwable): Throwable = 
{

Review Comment:
   nit: e1 -> cause



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala:
##########
@@ -3210,4 +3211,189 @@ private[sql] object QueryCompilationErrors extends 
QueryErrorsBase {
       messageParameters = Map("expression" -> toSQLExpr(expression))
     )
   }
+
+  def cannotConvertProtobufTypeToSqlTypeError(
+    protobufColumn: String,
+    sqlColumn: String,
+    protobufType: String,
+    sqlType: DataType): Throwable = {
+    new AnalysisException(
+      errorClass = "PROTOBUF_TYPE_TO_SQL_TYPE_ERROR",
+      messageParameters = Map(
+        "protobufColumn" -> protobufColumn,
+        "sqlColumn" -> sqlColumn,
+        "protobufType" -> protobufType,
+        "sqlType" -> toSQLType(sqlType)))
+  }
+
+  def cannotConvertCatalystTypeToProtobufTypeError(
+    sqlColumn: String,
+    protobufColumn: String,
+    sqlType: DataType,
+    protobufType: String): Throwable = {
+    new AnalysisException(
+      errorClass = "SQL_TYPE_TO_PROTOBUF_TYPE_ERROR",
+      messageParameters = Map(
+        "sqlColumn" -> sqlColumn,

Review Comment:
   ditto.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to