kunal642 commented on a change in pull request #4221:
URL: https://github.com/apache/carbondata/pull/4221#discussion_r722963262
##########
File path:
integration/spark/src/main/common2.3and2.4/org/apache/spark/sql/SparkVersionAdapter.scala
##########
@@ -430,6 +430,22 @@ trait SparkVersionAdapter {
  def evaluateWithPredicate(exp: Expression, schema: Seq[Attribute], row: InternalRow): Any = {
    InterpretedPredicate.create(exp, schema).expression.eval(row)
  }
+
+  def getUpdatedPlan(plan: LogicalPlan, sqlText: String): LogicalPlan = {
+    plan match {
+      case create@CreateTable(tableDesc, mode, query) =>
+        if (tableDesc.storage.locationUri.isDefined &&
+            !sqlText.toUpperCase.startsWith("CREATE EXTERNAL TABLE ")) {
+          // add a property to differentiate if the create table statement has the external keyword or not
+          val newProperties = tableDesc.properties + ("hasexternalkeyword" -> "false")
+          val updatedTableDesc = tableDesc.copy(properties = newProperties)
+          CreateTable(updatedTableDesc, mode, query)
Review comment:
Can we make the table type external instead of putting it in the properties?
Then there is no need to check for this keyword, and the existing flows can
handle the transactional table creation.
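
A rough sketch of what that could look like (hypothetical, not code from this
PR; it assumes the same `getUpdatedPlan` entry point and relies on Spark's
`CatalogTableType.EXTERNAL`):

```scala
import org.apache.spark.sql.catalyst.catalog.CatalogTableType
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution.datasources.CreateTable

// Hypothetical sketch of the suggestion above: instead of recording a
// "hasexternalkeyword" property, flip the table type to EXTERNAL so the
// existing external-table flows take over downstream.
def getUpdatedPlan(plan: LogicalPlan, sqlText: String): LogicalPlan = {
  plan match {
    case CreateTable(tableDesc, mode, query)
        if tableDesc.storage.locationUri.isDefined &&
           !sqlText.toUpperCase.startsWith("CREATE EXTERNAL TABLE ") =>
      // Mark the table itself as external; no extra property or keyword check is needed later.
      CreateTable(tableDesc.copy(tableType = CatalogTableType.EXTERNAL), mode, query)
    case other => other
  }
}
```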
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]