[ https://issues.apache.org/jira/browse/HIVE-25652?focusedWorklogId=672969&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-672969 ]

ASF GitHub Bot logged work on HIVE-25652:
-----------------------------------------

                Author: ASF GitHub Bot
            Created on: 02/Nov/21 05:02
            Start Date: 02/Nov/21 05:02
    Worklog Time Spent: 10m 
      Work Description: kasakrisz commented on a change in pull request #2752:
URL: https://github.com/apache/hive/pull/2752#discussion_r740720412



##########
File path: ql/src/java/org/apache/hadoop/hive/ql/exec/DDLPlanUtils.java
##########
@@ -800,19 +809,166 @@ private String getExternal(Table table) {
     return table.getTableType() == TableType.EXTERNAL_TABLE ? "EXTERNAL " : "";
   }
 
-  private String getColumns(Table table) {
-    List<String> columnDescs = new ArrayList<String>();
+  private String getColumns(Table table) throws HiveException{
+    List<String> columnDescs = new ArrayList<>();
+    List<String> columns = table.getCols().stream().map(FieldSchema::getName).collect(Collectors.toList());
+    Set<String> notNullColumns = Collections.emptySet();
+    if (NotNullConstraint.isNotEmpty(table.getNotNullConstraint())) {
+      notNullColumns = new HashSet<>(table.getNotNullConstraint().getNotNullConstraints().values());
+    }
+
+    Map<String, String> columnDefaultValueMap = Collections.emptyMap();
+    if (DefaultConstraint.isNotEmpty(table.getDefaultConstraint())) {
+      columnDefaultValueMap = table.getDefaultConstraint().getColNameToDefaultValueMap();
+    }
+
+    List<SQLCheckConstraint> sqlCheckConstraints;
+    try {
+      sqlCheckConstraints = Hive.get().getCheckConstraintList(table.getDbName(), table.getTableName());
+    } catch (NoSuchObjectException e) {
+      throw new HiveException(e);
+    }
+    Map<String, SQLCheckConstraint> columnCheckConstraintsMap = sqlCheckConstraints.stream()
+      .filter(SQLCheckConstraint::isSetColumn_name)
+      .collect(Collectors.toMap(SQLCheckConstraint::getColumn_name, Function.identity()));
+    List<SQLCheckConstraint> tableCheckConstraints = sqlCheckConstraints.stream()
+      .filter(cc -> !cc.isSetColumn_name())
+      .collect(Collectors.toList());
+
     for (FieldSchema column : table.getCols()) {
       String columnType = formatType(TypeInfoUtils.getTypeInfoFromTypeString(column.getType()));
-      String columnDesc = "  `" + column.getName() + "` " + columnType;
+      String columnName = column.getName();
+      StringBuilder columnDesc = new StringBuilder();
+      columnDesc.append("  `").append(columnName).append("` ").append(columnType);
+      if (notNullColumns.contains(columnName)) {
+        columnDesc.append(" NOT NULL");
+      }
+      if (columnDefaultValueMap.containsKey(columnName)) {
+        columnDesc.append(" DEFAULT ").append(columnDefaultValueMap.get(columnName));
+      }
+      if (columnCheckConstraintsMap.containsKey(columnName)) {
+        columnDesc.append(getColumnCheckConstraintDesc(columnCheckConstraintsMap.get(columnName), columns));
+      }
       if (column.getComment() != null) {
-        columnDesc += " COMMENT '" + HiveStringUtils.escapeHiveCommand(column.getComment()) + "'";
+        columnDesc.append(" COMMENT '").append(HiveStringUtils.escapeHiveCommand(column.getComment())).append("'");
       }
-      columnDescs.add(columnDesc);
+      columnDescs.add(columnDesc.toString());
     }
+    String pkDesc = getPrimaryKeyDesc(table);
+    if (pkDesc != null) {
+      columnDescs.add(pkDesc);
+    }
+    columnDescs.addAll(getForeignKeyDesc(table));
+    columnDescs.addAll(getTableCheckConstraintDesc(tableCheckConstraints, columns));
     return StringUtils.join(columnDescs, ", \n");
   }
 
+  private List<String> getTableCheckConstraintDesc(List<SQLCheckConstraint> tableCheckConstraints,
+                                                   List<String> columns) {
+    List<String> ccDescs = new ArrayList<>();
+    for (SQLCheckConstraint constraint: tableCheckConstraints) {
+      String enable = constraint.isEnable_cstr()? " enable": " disable";
+      String validate = constraint.isValidate_cstr()? " validate": " novalidate";
+      String rely = constraint.isRely_cstr()? " rely": " norely";
+      String expression = getCheckExpressionWithBackticks(columns, constraint);
+      ccDescs.add("  constraint " + constraint.getDc_name() + " CHECK(" + expression +
+        ")" + enable + validate + rely);
+    }
+    return ccDescs;
+  }
+
+  private String getCheckExpressionWithBackticks(List<String> columns, SQLCheckConstraint constraint) {
+    TreeMap<Integer, String> indexToCols = new TreeMap<>();
+    String expression = constraint.getCheck_expression();
+    for (String col: columns) {
+      int idx = expression.indexOf(col);
+      if (idx == -1) {
+        continue;
+      }
+      indexToCols.put(idx, col);
+      while (idx + col.length() < expression.length()) {
+        idx = expression.indexOf(col, idx + col.length());
+        if (idx == -1) {
+          break;
+        }
+        indexToCols.put(idx, col);
+      }
+    }

Review comment:
       Does this also collect column name strings that occur inside literals?
   Ex.:
   ```
   `Test col` varchar(100) CHECK (`Test col` <> 'Test col') enable novalidate rely
   ```
   If yes, the check constraint would be altered to
   ```
   `Test col` varchar(100) CHECK (`Test col` <> '`Test col`') enable novalidate rely
   ```
   and would end up comparing against a different literal.
   
   Please add a test case to `show_create_table.q` to verify this.
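   For illustration, a minimal sketch of such a q-file case (the table name `check_literal_tbl` is only a placeholder):
   ```
   -- the column name `Test col` also occurs inside the string literal of the CHECK expression
   CREATE TABLE check_literal_tbl (
     `Test col` varchar(100) CHECK (`Test col` <> 'Test col') enable novalidate rely
   );
   SHOW CREATE TABLE check_literal_tbl;
   ```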




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: gitbox-unsubscr...@hive.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


Issue Time Tracking
-------------------

    Worklog Id:     (was: 672969)
    Time Spent: 1h 10m  (was: 1h)

> Add constraints in result of “SHOW CREATE TABLE ”
> -------------------------------------------------
>
>                 Key: HIVE-25652
>                 URL: https://issues.apache.org/jira/browse/HIVE-25652
>             Project: Hive
>          Issue Type: Improvement
>            Reporter: Soumyakanti Das
>            Assignee: Soumyakanti Das
>            Priority: Major
>              Labels: pull-request-available
>          Time Spent: 1h 10m
>  Remaining Estimate: 0h
>
> Currently SHOW CREATE TABLE doesn't pull any constraint info such as NOT NULL, DEFAULT, or PRIMARY KEY.
> Example:
> Create table
>  
> {code:java}
> CREATE TABLE TEST(
>   col1 varchar(100) NOT NULL COMMENT "comment for column 1",
>   col2 timestamp DEFAULT CURRENT_TIMESTAMP() COMMENT "comment for column 2",
>   col3 decimal,
>   col4 varchar(512) NOT NULL,
>   col5 varchar(100),
>   primary key(col1, col2) disable novalidate)
> ROW FORMAT SERDE
>   'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
> STORED AS INPUTFORMAT
>   'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
> OUTPUTFORMAT
>   'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat';
> {code}
> Currently {{SHOW CREATE TABLE TEST}} doesn't show the column constraints.
> {code:java}
> CREATE TABLE `test`(
>   `col1` varchar(100) COMMENT 'comment for column 1', 
>   `col2` timestamp COMMENT 'comment for column 2', 
>   `col3` decimal(10,0), 
>   `col4` varchar(512), 
>   `col5` varchar(100))
> ROW FORMAT SERDE 
>   'org.apache.hadoop.hive.ql.io.orc.OrcSerde' 
> STORED AS INPUTFORMAT 
>   'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat' 
> OUTPUTFORMAT 
>   'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
> {code}
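> A rough sketch of the desired output once constraint info is surfaced (illustrative only; the exact clause formatting emitted by the patch may differ):
> {code:java}
> CREATE TABLE `test`(
>   `col1` varchar(100) NOT NULL COMMENT 'comment for column 1', 
>   `col2` timestamp DEFAULT CURRENT_TIMESTAMP() COMMENT 'comment for column 2', 
>   `col3` decimal(10,0), 
>   `col4` varchar(512) NOT NULL, 
>   `col5` varchar(100), 
>   primary key(`col1`,`col2`) disable novalidate)
> ROW FORMAT SERDE 
>   'org.apache.hadoop.hive.ql.io.orc.OrcSerde' 
> STORED AS INPUTFORMAT 
>   'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat' 
> OUTPUTFORMAT 
>   'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
> {code}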



--
This message was sent by Atlassian Jira
(v8.3.4#803005)
