[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-28 Thread asfgit
Github user asfgit closed the pull request at:

https://github.com/apache/carbondata/pull/3019


---


[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-28 Thread kumarvishal09
Github user kumarvishal09 commented on a diff in the pull request:

https://github.com/apache/carbondata/pull/3019#discussion_r244104691
  
--- Diff: 
integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataPageSourceProvider.java
 ---
@@ -43,63 +43,78 @@
 
 import static org.apache.carbondata.presto.Types.checkType;
 
+import com.facebook.presto.hive.HdfsEnvironment;
+import com.facebook.presto.hive.HiveClientConfig;
+import com.facebook.presto.hive.HiveColumnHandle;
+import com.facebook.presto.hive.HivePageSourceFactory;
+import com.facebook.presto.hive.HivePageSourceProvider;
+import com.facebook.presto.hive.HiveRecordCursorProvider;
+import com.facebook.presto.hive.HiveSplit;
 import com.facebook.presto.spi.ColumnHandle;
 import com.facebook.presto.spi.ConnectorPageSource;
 import com.facebook.presto.spi.ConnectorSession;
 import com.facebook.presto.spi.ConnectorSplit;
-import com.facebook.presto.spi.connector.ConnectorPageSourceProvider;
+import com.facebook.presto.spi.SchemaTableName;
 import com.facebook.presto.spi.connector.ConnectorTransactionHandle;
+import com.facebook.presto.spi.type.TypeManager;
 import com.google.common.collect.ImmutableList;
 import com.google.inject.Inject;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.TaskAttemptContextImpl;
 import org.apache.hadoop.mapred.TaskAttemptID;
 import org.apache.hadoop.mapreduce.TaskType;
 
-import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;
 
-
 /**
  * Provider Class for Carbondata Page Source class.
  */
-public class CarbondataPageSourceProvider implements 
ConnectorPageSourceProvider {
+public class CarbondataPageSourceProvider extends HivePageSourceProvider {
 
-  private String connectorId;
   private CarbonTableReader carbonTableReader;
   private String queryId ;
-
-  @Inject public CarbondataPageSourceProvider(CarbondataConnectorId 
connectorId,
+  private HdfsEnvironment hdfsEnvironment;
+
+  @Inject public CarbondataPageSourceProvider(
+  HiveClientConfig hiveClientConfig,
+  HdfsEnvironment hdfsEnvironment,
+  Set cursorProviders,
+  Set pageSourceFactories,
+  TypeManager typeManager,
   CarbonTableReader carbonTableReader) {
-this.connectorId = requireNonNull(connectorId, "connectorId is 
null").toString();
+super(hiveClientConfig, hdfsEnvironment, cursorProviders, 
pageSourceFactories, typeManager);
 this.carbonTableReader = requireNonNull(carbonTableReader, 
"carbonTableReader is null");
+this.hdfsEnvironment = hdfsEnvironment;
   }
 
   @Override
   public ConnectorPageSource createPageSource(ConnectorTransactionHandle 
transactionHandle,
   ConnectorSession session, ConnectorSplit split, List 
columns) {
-this.queryId = ((CarbondataSplit)split).getQueryId();
+HiveSplit carbonSplit =
+checkType(split, HiveSplit.class, "split is not class HiveSplit");
+if (carbonSplit.getSchema().getProperty("queryId") == null) {
+  return super.createPageSource(transactionHandle, session, split, 
columns);
+}
+this.queryId = carbonSplit.getSchema().getProperty("queryId");
--- End diff --

Move this line above the if condition, and in the if condition check whether queryId is 
null


---


[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-28 Thread ravipesala
Github user ravipesala commented on a diff in the pull request:

https://github.com/apache/carbondata/pull/3019#discussion_r244278709
  
--- Diff: 
integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala
 ---
@@ -157,7 +157,7 @@ case class CarbonCreateTableCommand(
  |  tableName "$tableName",
  |  dbName "$dbName",
  |  tablePath "$tablePath",
- |  path "$tablePath",
+ |  path "${FileFactory.addSchemeIfNotExists(tablePath)}",
--- End diff --

ok


---


[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-28 Thread ravipesala
Github user ravipesala commented on a diff in the pull request:

https://github.com/apache/carbondata/pull/3019#discussion_r244278266
  
--- Diff: 
core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java 
---
@@ -369,6 +369,24 @@ public static boolean createNewLockFile(String 
filePath, FileType fileType) thro
 LOCAL, HDFS, ALLUXIO, VIEWFS, S3
   }
 
+  public static String addSchemeIfNotExists(String filePath) {
--- End diff --

ok


---


[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-27 Thread ravipesala
Github user ravipesala commented on a diff in the pull request:

https://github.com/apache/carbondata/pull/3019#discussion_r244278116
  
--- Diff: 
integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataModule.java
 ---
@@ -17,62 +17,150 @@
 
 package org.apache.carbondata.presto;
 
-import javax.inject.Inject;
+import java.util.function.Supplier;
 
 import static java.util.Objects.requireNonNull;
 
-import org.apache.carbondata.presto.impl.CarbonTableConfig;
 import org.apache.carbondata.presto.impl.CarbonTableReader;
 
+import com.facebook.presto.hive.CoercionPolicy;
+import com.facebook.presto.hive.DirectoryLister;
+import com.facebook.presto.hive.FileFormatDataSourceStats;
+import com.facebook.presto.hive.GenericHiveRecordCursorProvider;
+import com.facebook.presto.hive.HadoopDirectoryLister;
+import com.facebook.presto.hive.HdfsConfiguration;
+import com.facebook.presto.hive.HdfsConfigurationUpdater;
+import com.facebook.presto.hive.HdfsEnvironment;
+import com.facebook.presto.hive.HiveClientConfig;
+import com.facebook.presto.hive.HiveClientModule;
+import com.facebook.presto.hive.HiveCoercionPolicy;
+import com.facebook.presto.hive.HiveConnectorId;
+import com.facebook.presto.hive.HiveEventClient;
+import com.facebook.presto.hive.HiveFileWriterFactory;
+import com.facebook.presto.hive.HiveHdfsConfiguration;
+import com.facebook.presto.hive.HiveLocationService;
+import com.facebook.presto.hive.HiveMetadataFactory;
+import com.facebook.presto.hive.HiveNodePartitioningProvider;
+import com.facebook.presto.hive.HivePageSinkProvider;
+import com.facebook.presto.hive.HivePageSourceFactory;
+import com.facebook.presto.hive.HivePartitionManager;
+import com.facebook.presto.hive.HiveRecordCursorProvider;
+import com.facebook.presto.hive.HiveSessionProperties;
+import com.facebook.presto.hive.HiveSplitManager;
+import com.facebook.presto.hive.HiveTableProperties;
+import com.facebook.presto.hive.HiveTransactionManager;
+import com.facebook.presto.hive.HiveTypeTranslator;
+import com.facebook.presto.hive.HiveWriterStats;
+import com.facebook.presto.hive.LocationService;
+import com.facebook.presto.hive.NamenodeStats;
+import com.facebook.presto.hive.OrcFileWriterConfig;
+import com.facebook.presto.hive.OrcFileWriterFactory;
+import com.facebook.presto.hive.PartitionUpdate;
+import com.facebook.presto.hive.RcFileFileWriterFactory;
+import com.facebook.presto.hive.TableParameterCodec;
+import com.facebook.presto.hive.TransactionalMetadata;
+import com.facebook.presto.hive.TypeTranslator;
+import com.facebook.presto.hive.orc.DwrfPageSourceFactory;
+import com.facebook.presto.hive.orc.OrcPageSourceFactory;
+import com.facebook.presto.hive.parquet.ParquetPageSourceFactory;
+import com.facebook.presto.hive.parquet.ParquetRecordCursorProvider;
+import com.facebook.presto.hive.rcfile.RcFilePageSourceFactory;
+import com.facebook.presto.spi.connector.ConnectorNodePartitioningProvider;
+import com.facebook.presto.spi.connector.ConnectorPageSinkProvider;
 import com.facebook.presto.spi.connector.ConnectorPageSourceProvider;
 import com.facebook.presto.spi.connector.ConnectorSplitManager;
-import com.facebook.presto.spi.type.Type;
-import com.facebook.presto.spi.type.TypeManager;
-import com.fasterxml.jackson.databind.DeserializationContext;
-import com.fasterxml.jackson.databind.deser.std.FromStringDeserializer;
 import com.google.inject.Binder;
-import com.google.inject.Module;
 import com.google.inject.Scopes;
+import com.google.inject.TypeLiteral;
+import com.google.inject.multibindings.Multibinder;
+import io.airlift.event.client.EventClient;
 
-import static 
com.facebook.presto.spi.type.TypeSignature.parseTypeSignature;
-import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.inject.multibindings.Multibinder.newSetBinder;
 import static io.airlift.configuration.ConfigBinder.configBinder;
+import static io.airlift.json.JsonCodecBinder.jsonCodecBinder;
 
-public class CarbondataModule implements Module {
+import static org.weakref.jmx.ObjectNames.generatedNameOf;
+import static org.weakref.jmx.guice.ExportBinder.newExporter;
+
+public class CarbondataModule extends HiveClientModule {
--- End diff --

ok



---


[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-27 Thread ravipesala
Github user ravipesala commented on a diff in the pull request:

https://github.com/apache/carbondata/pull/3019#discussion_r244276202
  
--- Diff: 
integration/presto/src/main/java/org/apache/carbondata/presto/PrestoFilterUtil.java
 ---
@@ -78,32 +72,33 @@
   private static final String HIVE_DEFAULT_DYNAMIC_PARTITION = 
"__HIVE_DEFAULT_PARTITION__";
 
   /**
-   * @param carbondataColumnHandle
+   * @param columnHandle
* @return
*/
-  private static DataType spi2CarbondataTypeMapper(CarbondataColumnHandle 
carbondataColumnHandle) {
-Type colType = carbondataColumnHandle.getColumnType();
-if (colType == BooleanType.BOOLEAN) {
+  private static DataType spi2CarbondataTypeMapper(HiveColumnHandle 
columnHandle) {
+HiveType colType = columnHandle.getHiveType();
+if (colType.equals(HiveType.HIVE_BOOLEAN)) {
   return DataTypes.BOOLEAN;
-} else if (colType == SmallintType.SMALLINT) {
+} else if (colType.equals(HiveType.HIVE_SHORT)) {
   return DataTypes.SHORT;
-} else if (colType == IntegerType.INTEGER) {
+} else if (colType.equals(HiveType.HIVE_INT)) {
   return DataTypes.INT;
-} else if (colType == BigintType.BIGINT) {
+} else if (colType.equals(HiveType.HIVE_LONG)) {
   return DataTypes.LONG;
-} else if (colType == DoubleType.DOUBLE) {
+} else if (colType.equals(HiveType.HIVE_DOUBLE)) {
   return DataTypes.DOUBLE;
-} else if (colType == VarcharType.VARCHAR) {
+} else if (colType.equals(HiveType.HIVE_STRING)) {
   return DataTypes.STRING;
-} else if (colType == DateType.DATE) {
+} else if (colType.equals(HiveType.HIVE_DATE)) {
   return DataTypes.DATE;
-} else if (colType == TimestampType.TIMESTAMP) {
+} else if (colType.equals(HiveType.HIVE_TIMESTAMP)) {
   return DataTypes.TIMESTAMP;
-} else if 
(colType.equals(DecimalType.createDecimalType(carbondataColumnHandle.getPrecision(),
-carbondataColumnHandle.getScale( {
-  return 
DataTypes.createDecimalType(carbondataColumnHandle.getPrecision(),
-  carbondataColumnHandle.getScale());
-} else {
+}
+else if (colType.getTypeInfo() instanceof DecimalTypeInfo) {
--- End diff --

ok


---


[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-27 Thread ravipesala
Github user ravipesala commented on a diff in the pull request:

https://github.com/apache/carbondata/pull/3019#discussion_r244276046
  
--- Diff: 
integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala
 ---
@@ -80,7 +80,7 @@ object CarbonDataStoreCreator {
   UUID.randomUUID().toString))
   //   val factFilePath: String = new 
File(dataFilePath).getCanonicalPath
   val storeDir: File = new File(absoluteTableIdentifier.getTablePath)
-  CarbonUtil.deleteFoldersAndFiles(storeDir)
+//  CarbonUtil.deleteFoldersAndFiles(storeDir)
--- End diff --

ok


---


[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-27 Thread jackylk
Github user jackylk commented on a diff in the pull request:

https://github.com/apache/carbondata/pull/3019#discussion_r244269584
  
--- Diff: 
integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala
 ---
@@ -157,7 +157,7 @@ case class CarbonCreateTableCommand(
  |  tableName "$tableName",
  |  dbName "$dbName",
  |  tablePath "$tablePath",
- |  path "$tablePath",
+ |  path "${FileFactory.addSchemeIfNotExists(tablePath)}",
--- End diff --

Can you update the comment from line 147 to 150? I feel it needs to be 
rephrased


---


[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-27 Thread jackylk
Github user jackylk commented on a diff in the pull request:

https://github.com/apache/carbondata/pull/3019#discussion_r244269478
  
--- Diff: 
core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java 
---
@@ -369,6 +369,24 @@ public static boolean createNewLockFile(String 
filePath, FileType fileType) thro
 LOCAL, HDFS, ALLUXIO, VIEWFS, S3
   }
 
+  public static String addSchemeIfNotExists(String filePath) {
--- End diff --

add comment


---


[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-27 Thread jackylk
Github user jackylk commented on a diff in the pull request:

https://github.com/apache/carbondata/pull/3019#discussion_r244269435
  
--- Diff: 
integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataModule.java
 ---
@@ -17,62 +17,150 @@
 
 package org.apache.carbondata.presto;
 
-import javax.inject.Inject;
+import java.util.function.Supplier;
 
 import static java.util.Objects.requireNonNull;
 
-import org.apache.carbondata.presto.impl.CarbonTableConfig;
 import org.apache.carbondata.presto.impl.CarbonTableReader;
 
+import com.facebook.presto.hive.CoercionPolicy;
+import com.facebook.presto.hive.DirectoryLister;
+import com.facebook.presto.hive.FileFormatDataSourceStats;
+import com.facebook.presto.hive.GenericHiveRecordCursorProvider;
+import com.facebook.presto.hive.HadoopDirectoryLister;
+import com.facebook.presto.hive.HdfsConfiguration;
+import com.facebook.presto.hive.HdfsConfigurationUpdater;
+import com.facebook.presto.hive.HdfsEnvironment;
+import com.facebook.presto.hive.HiveClientConfig;
+import com.facebook.presto.hive.HiveClientModule;
+import com.facebook.presto.hive.HiveCoercionPolicy;
+import com.facebook.presto.hive.HiveConnectorId;
+import com.facebook.presto.hive.HiveEventClient;
+import com.facebook.presto.hive.HiveFileWriterFactory;
+import com.facebook.presto.hive.HiveHdfsConfiguration;
+import com.facebook.presto.hive.HiveLocationService;
+import com.facebook.presto.hive.HiveMetadataFactory;
+import com.facebook.presto.hive.HiveNodePartitioningProvider;
+import com.facebook.presto.hive.HivePageSinkProvider;
+import com.facebook.presto.hive.HivePageSourceFactory;
+import com.facebook.presto.hive.HivePartitionManager;
+import com.facebook.presto.hive.HiveRecordCursorProvider;
+import com.facebook.presto.hive.HiveSessionProperties;
+import com.facebook.presto.hive.HiveSplitManager;
+import com.facebook.presto.hive.HiveTableProperties;
+import com.facebook.presto.hive.HiveTransactionManager;
+import com.facebook.presto.hive.HiveTypeTranslator;
+import com.facebook.presto.hive.HiveWriterStats;
+import com.facebook.presto.hive.LocationService;
+import com.facebook.presto.hive.NamenodeStats;
+import com.facebook.presto.hive.OrcFileWriterConfig;
+import com.facebook.presto.hive.OrcFileWriterFactory;
+import com.facebook.presto.hive.PartitionUpdate;
+import com.facebook.presto.hive.RcFileFileWriterFactory;
+import com.facebook.presto.hive.TableParameterCodec;
+import com.facebook.presto.hive.TransactionalMetadata;
+import com.facebook.presto.hive.TypeTranslator;
+import com.facebook.presto.hive.orc.DwrfPageSourceFactory;
+import com.facebook.presto.hive.orc.OrcPageSourceFactory;
+import com.facebook.presto.hive.parquet.ParquetPageSourceFactory;
+import com.facebook.presto.hive.parquet.ParquetRecordCursorProvider;
+import com.facebook.presto.hive.rcfile.RcFilePageSourceFactory;
+import com.facebook.presto.spi.connector.ConnectorNodePartitioningProvider;
+import com.facebook.presto.spi.connector.ConnectorPageSinkProvider;
 import com.facebook.presto.spi.connector.ConnectorPageSourceProvider;
 import com.facebook.presto.spi.connector.ConnectorSplitManager;
-import com.facebook.presto.spi.type.Type;
-import com.facebook.presto.spi.type.TypeManager;
-import com.fasterxml.jackson.databind.DeserializationContext;
-import com.fasterxml.jackson.databind.deser.std.FromStringDeserializer;
 import com.google.inject.Binder;
-import com.google.inject.Module;
 import com.google.inject.Scopes;
+import com.google.inject.TypeLiteral;
+import com.google.inject.multibindings.Multibinder;
+import io.airlift.event.client.EventClient;
 
-import static 
com.facebook.presto.spi.type.TypeSignature.parseTypeSignature;
-import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.inject.multibindings.Multibinder.newSetBinder;
 import static io.airlift.configuration.ConfigBinder.configBinder;
+import static io.airlift.json.JsonCodecBinder.jsonCodecBinder;
 
-public class CarbondataModule implements Module {
+import static org.weakref.jmx.ObjectNames.generatedNameOf;
+import static org.weakref.jmx.guice.ExportBinder.newExporter;
+
+public class CarbondataModule extends HiveClientModule {
--- End diff --

Please add comment for this class


---


[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-27 Thread jackylk
Github user jackylk commented on a diff in the pull request:

https://github.com/apache/carbondata/pull/3019#discussion_r244254924
  
--- Diff: 
integration/presto/src/main/java/org/apache/carbondata/presto/PrestoFilterUtil.java
 ---
@@ -78,32 +72,33 @@
   private static final String HIVE_DEFAULT_DYNAMIC_PARTITION = 
"__HIVE_DEFAULT_PARTITION__";
 
   /**
-   * @param carbondataColumnHandle
+   * @param columnHandle
* @return
*/
-  private static DataType spi2CarbondataTypeMapper(CarbondataColumnHandle 
carbondataColumnHandle) {
-Type colType = carbondataColumnHandle.getColumnType();
-if (colType == BooleanType.BOOLEAN) {
+  private static DataType spi2CarbondataTypeMapper(HiveColumnHandle 
columnHandle) {
+HiveType colType = columnHandle.getHiveType();
+if (colType.equals(HiveType.HIVE_BOOLEAN)) {
   return DataTypes.BOOLEAN;
-} else if (colType == SmallintType.SMALLINT) {
+} else if (colType.equals(HiveType.HIVE_SHORT)) {
   return DataTypes.SHORT;
-} else if (colType == IntegerType.INTEGER) {
+} else if (colType.equals(HiveType.HIVE_INT)) {
   return DataTypes.INT;
-} else if (colType == BigintType.BIGINT) {
+} else if (colType.equals(HiveType.HIVE_LONG)) {
   return DataTypes.LONG;
-} else if (colType == DoubleType.DOUBLE) {
+} else if (colType.equals(HiveType.HIVE_DOUBLE)) {
   return DataTypes.DOUBLE;
-} else if (colType == VarcharType.VARCHAR) {
+} else if (colType.equals(HiveType.HIVE_STRING)) {
   return DataTypes.STRING;
-} else if (colType == DateType.DATE) {
+} else if (colType.equals(HiveType.HIVE_DATE)) {
   return DataTypes.DATE;
-} else if (colType == TimestampType.TIMESTAMP) {
+} else if (colType.equals(HiveType.HIVE_TIMESTAMP)) {
   return DataTypes.TIMESTAMP;
-} else if 
(colType.equals(DecimalType.createDecimalType(carbondataColumnHandle.getPrecision(),
-carbondataColumnHandle.getScale( {
-  return 
DataTypes.createDecimalType(carbondataColumnHandle.getPrecision(),
-  carbondataColumnHandle.getScale());
-} else {
+}
+else if (colType.getTypeInfo() instanceof DecimalTypeInfo) {
+  DecimalTypeInfo typeInfo = (DecimalTypeInfo) colType.getTypeInfo();
+  return 
DataTypes.createDecimalType(typeInfo.getPrecision(),typeInfo.getScale());
+}
+else {
--- End diff --

move up


---


[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-27 Thread jackylk
Github user jackylk commented on a diff in the pull request:

https://github.com/apache/carbondata/pull/3019#discussion_r244254915
  
--- Diff: 
integration/presto/src/main/java/org/apache/carbondata/presto/PrestoFilterUtil.java
 ---
@@ -78,32 +72,33 @@
   private static final String HIVE_DEFAULT_DYNAMIC_PARTITION = 
"__HIVE_DEFAULT_PARTITION__";
 
   /**
-   * @param carbondataColumnHandle
+   * @param columnHandle
* @return
*/
-  private static DataType spi2CarbondataTypeMapper(CarbondataColumnHandle 
carbondataColumnHandle) {
-Type colType = carbondataColumnHandle.getColumnType();
-if (colType == BooleanType.BOOLEAN) {
+  private static DataType spi2CarbondataTypeMapper(HiveColumnHandle 
columnHandle) {
+HiveType colType = columnHandle.getHiveType();
+if (colType.equals(HiveType.HIVE_BOOLEAN)) {
   return DataTypes.BOOLEAN;
-} else if (colType == SmallintType.SMALLINT) {
+} else if (colType.equals(HiveType.HIVE_SHORT)) {
   return DataTypes.SHORT;
-} else if (colType == IntegerType.INTEGER) {
+} else if (colType.equals(HiveType.HIVE_INT)) {
   return DataTypes.INT;
-} else if (colType == BigintType.BIGINT) {
+} else if (colType.equals(HiveType.HIVE_LONG)) {
   return DataTypes.LONG;
-} else if (colType == DoubleType.DOUBLE) {
+} else if (colType.equals(HiveType.HIVE_DOUBLE)) {
   return DataTypes.DOUBLE;
-} else if (colType == VarcharType.VARCHAR) {
+} else if (colType.equals(HiveType.HIVE_STRING)) {
   return DataTypes.STRING;
-} else if (colType == DateType.DATE) {
+} else if (colType.equals(HiveType.HIVE_DATE)) {
   return DataTypes.DATE;
-} else if (colType == TimestampType.TIMESTAMP) {
+} else if (colType.equals(HiveType.HIVE_TIMESTAMP)) {
   return DataTypes.TIMESTAMP;
-} else if 
(colType.equals(DecimalType.createDecimalType(carbondataColumnHandle.getPrecision(),
-carbondataColumnHandle.getScale( {
-  return 
DataTypes.createDecimalType(carbondataColumnHandle.getPrecision(),
-  carbondataColumnHandle.getScale());
-} else {
+}
+else if (colType.getTypeInfo() instanceof DecimalTypeInfo) {
--- End diff --

move up 


---


[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-27 Thread jackylk
Github user jackylk commented on a diff in the pull request:

https://github.com/apache/carbondata/pull/3019#discussion_r244254822
  
--- Diff: 
integration/presto/src/test/scala/org/apache/carbondata/presto/server/PrestoServer.scala
 ---
@@ -129,6 +130,21 @@ class PrestoServer {
 }
   }
 
+  def execute(query: String) = {
+
+Try {
+  val conn: Connection = createJdbcConnection(dbName)
+  logger.info(s"* executing the query * \n $query")
--- End diff --

rename logger to LOGGER


---


[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-27 Thread jackylk
Github user jackylk commented on a diff in the pull request:

https://github.com/apache/carbondata/pull/3019#discussion_r244254647
  
--- Diff: 
integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala
 ---
@@ -80,7 +80,7 @@ object CarbonDataStoreCreator {
   UUID.randomUUID().toString))
   //   val factFilePath: String = new 
File(dataFilePath).getCanonicalPath
   val storeDir: File = new File(absoluteTableIdentifier.getTablePath)
-  CarbonUtil.deleteFoldersAndFiles(storeDir)
+//  CarbonUtil.deleteFoldersAndFiles(storeDir)
--- End diff --

Delete it if it is not required; the same applies to line 81


---


[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-27 Thread jackylk
Github user jackylk commented on a diff in the pull request:

https://github.com/apache/carbondata/pull/3019#discussion_r244172111
  
--- Diff: 
integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataConnectorFactory.java
 ---
@@ -17,69 +17,177 @@
 
 package org.apache.carbondata.presto;
 
+import java.lang.management.ManagementFactory;
+import java.lang.reflect.*;
 import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
 
 import static java.util.Objects.requireNonNull;
 
-import com.facebook.presto.spi.ConnectorHandleResolver;
+import org.apache.carbondata.presto.impl.CarbonTableConfig;
+
+import com.facebook.presto.hive.HiveConnector;
+import com.facebook.presto.hive.HiveConnectorFactory;
+import com.facebook.presto.hive.HiveMetadataFactory;
+import com.facebook.presto.hive.HiveProcedureModule;
+import com.facebook.presto.hive.HiveSchemaProperties;
+import com.facebook.presto.hive.HiveSessionProperties;
+import com.facebook.presto.hive.HiveStorageFormat;
+import com.facebook.presto.hive.HiveTableProperties;
+import com.facebook.presto.hive.HiveTransactionManager;
+import com.facebook.presto.hive.NodeVersion;
+import com.facebook.presto.hive.RebindSafeMBeanServer;
+import com.facebook.presto.hive.authentication.HiveAuthenticationModule;
+import com.facebook.presto.hive.metastore.HiveMetastoreModule;
+import com.facebook.presto.hive.s3.HiveS3Module;
+import com.facebook.presto.hive.security.HiveSecurityModule;
+import com.facebook.presto.hive.security.PartitionsAwareAccessControl;
+import com.facebook.presto.spi.NodeManager;
+import com.facebook.presto.spi.PageIndexerFactory;
+import com.facebook.presto.spi.PageSorter;
 import com.facebook.presto.spi.classloader.ThreadContextClassLoader;
-import com.facebook.presto.spi.connector.*;
-import 
com.facebook.presto.spi.connector.classloader.ClassLoaderSafeConnectorMetadata;
+import com.facebook.presto.spi.connector.Connector;
+import com.facebook.presto.spi.connector.ConnectorAccessControl;
+import com.facebook.presto.spi.connector.ConnectorContext;
+import com.facebook.presto.spi.connector.ConnectorNodePartitioningProvider;
+import com.facebook.presto.spi.connector.ConnectorPageSinkProvider;
+import com.facebook.presto.spi.connector.ConnectorPageSourceProvider;
+import com.facebook.presto.spi.connector.ConnectorSplitManager;
+import 
com.facebook.presto.spi.connector.classloader.ClassLoaderSafeConnectorPageSinkProvider;
 import 
com.facebook.presto.spi.connector.classloader.ClassLoaderSafeConnectorPageSourceProvider;
 import 
com.facebook.presto.spi.connector.classloader.ClassLoaderSafeConnectorSplitManager;
-import com.google.common.base.Throwables;
+import 
com.facebook.presto.spi.connector.classloader.ClassLoaderSafeNodePartitioningProvider;
+import com.facebook.presto.spi.procedure.Procedure;
+import com.facebook.presto.spi.type.TypeManager;
+import com.google.common.collect.ImmutableSet;
 import com.google.inject.Injector;
+import com.google.inject.Key;
+import com.google.inject.TypeLiteral;
 import io.airlift.bootstrap.Bootstrap;
 import io.airlift.bootstrap.LifeCycleManager;
+import io.airlift.event.client.EventModule;
 import io.airlift.json.JsonModule;
+import io.airlift.units.DataSize;
+import org.weakref.jmx.guice.MBeanModule;
+import sun.reflect.ConstructorAccessor;
 
+import static com.google.common.base.Throwables.throwIfUnchecked;
+import static io.airlift.configuration.ConfigBinder.configBinder;
 
 /**
  * Build Carbondata Connector
  * It will be called by CarbondataPlugin
  */
-public class CarbondataConnectorFactory implements ConnectorFactory {
+public class CarbondataConnectorFactory extends HiveConnectorFactory {
 
-  private final String name;
   private final ClassLoader classLoader;
 
   public CarbondataConnectorFactory(String connectorName, ClassLoader 
classLoader) {
-this.name = connectorName;
+super(connectorName, classLoader, null);
 this.classLoader = requireNonNull(classLoader, "classLoader is null");
   }
 
-  @Override public String getName() {
-return name;
-  }
-
-  @Override public ConnectorHandleResolver getHandleResolver() {
-return new CarbondataHandleResolver();
-  }
-
-  @Override public Connector create(String connectorId, Map config,
+  @Override public Connector create(String catalogName, Map config,
   ConnectorContext context) {
 requireNonNull(config, "config is null");
 
 try (ThreadContextClassLoader ignored = new 
ThreadContextClassLoader(classLoader)) {
-  Bootstrap app = new Bootstrap(new JsonModule(),
-  new 

[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-27 Thread ajantha-bhat
Github user ajantha-bhat commented on a diff in the pull request:

https://github.com/apache/carbondata/pull/3019#discussion_r244153304
  
--- Diff: 
integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataConnectorFactory.java
 ---
@@ -17,69 +17,177 @@
 
 package org.apache.carbondata.presto;
 
+import java.lang.management.ManagementFactory;
+import java.lang.reflect.*;
 import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
 
 import static java.util.Objects.requireNonNull;
 
-import com.facebook.presto.spi.ConnectorHandleResolver;
+import org.apache.carbondata.presto.impl.CarbonTableConfig;
+
+import com.facebook.presto.hive.HiveConnector;
+import com.facebook.presto.hive.HiveConnectorFactory;
+import com.facebook.presto.hive.HiveMetadataFactory;
+import com.facebook.presto.hive.HiveProcedureModule;
+import com.facebook.presto.hive.HiveSchemaProperties;
+import com.facebook.presto.hive.HiveSessionProperties;
+import com.facebook.presto.hive.HiveStorageFormat;
+import com.facebook.presto.hive.HiveTableProperties;
+import com.facebook.presto.hive.HiveTransactionManager;
+import com.facebook.presto.hive.NodeVersion;
+import com.facebook.presto.hive.RebindSafeMBeanServer;
+import com.facebook.presto.hive.authentication.HiveAuthenticationModule;
+import com.facebook.presto.hive.metastore.HiveMetastoreModule;
+import com.facebook.presto.hive.s3.HiveS3Module;
+import com.facebook.presto.hive.security.HiveSecurityModule;
+import com.facebook.presto.hive.security.PartitionsAwareAccessControl;
+import com.facebook.presto.spi.NodeManager;
+import com.facebook.presto.spi.PageIndexerFactory;
+import com.facebook.presto.spi.PageSorter;
 import com.facebook.presto.spi.classloader.ThreadContextClassLoader;
-import com.facebook.presto.spi.connector.*;
-import 
com.facebook.presto.spi.connector.classloader.ClassLoaderSafeConnectorMetadata;
+import com.facebook.presto.spi.connector.Connector;
+import com.facebook.presto.spi.connector.ConnectorAccessControl;
+import com.facebook.presto.spi.connector.ConnectorContext;
+import com.facebook.presto.spi.connector.ConnectorNodePartitioningProvider;
+import com.facebook.presto.spi.connector.ConnectorPageSinkProvider;
+import com.facebook.presto.spi.connector.ConnectorPageSourceProvider;
+import com.facebook.presto.spi.connector.ConnectorSplitManager;
+import 
com.facebook.presto.spi.connector.classloader.ClassLoaderSafeConnectorPageSinkProvider;
 import 
com.facebook.presto.spi.connector.classloader.ClassLoaderSafeConnectorPageSourceProvider;
 import 
com.facebook.presto.spi.connector.classloader.ClassLoaderSafeConnectorSplitManager;
-import com.google.common.base.Throwables;
+import 
com.facebook.presto.spi.connector.classloader.ClassLoaderSafeNodePartitioningProvider;
+import com.facebook.presto.spi.procedure.Procedure;
+import com.facebook.presto.spi.type.TypeManager;
+import com.google.common.collect.ImmutableSet;
 import com.google.inject.Injector;
+import com.google.inject.Key;
+import com.google.inject.TypeLiteral;
 import io.airlift.bootstrap.Bootstrap;
 import io.airlift.bootstrap.LifeCycleManager;
+import io.airlift.event.client.EventModule;
 import io.airlift.json.JsonModule;
+import io.airlift.units.DataSize;
+import org.weakref.jmx.guice.MBeanModule;
+import sun.reflect.ConstructorAccessor;
 
+import static com.google.common.base.Throwables.throwIfUnchecked;
+import static io.airlift.configuration.ConfigBinder.configBinder;
 
 /**
  * Build Carbondata Connector
  * It will be called by CarbondataPlugin
  */
-public class CarbondataConnectorFactory implements ConnectorFactory {
+public class CarbondataConnectorFactory extends HiveConnectorFactory {
 
-  private final String name;
   private final ClassLoader classLoader;
 
   public CarbondataConnectorFactory(String connectorName, ClassLoader 
classLoader) {
-this.name = connectorName;
+super(connectorName, classLoader, null);
 this.classLoader = requireNonNull(classLoader, "classLoader is null");
   }
 
-  @Override public String getName() {
-return name;
-  }
-
-  @Override public ConnectorHandleResolver getHandleResolver() {
-return new CarbondataHandleResolver();
-  }
-
-  @Override public Connector create(String connectorId, Map config,
+  @Override public Connector create(String catalogName, Map config,
   ConnectorContext context) {
 requireNonNull(config, "config is null");
 
 try (ThreadContextClassLoader ignored = new 
ThreadContextClassLoader(classLoader)) {
-  Bootstrap app = new Bootstrap(new JsonModule(),
-  new 

[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-26 Thread ravipesala
Github user ravipesala commented on a diff in the pull request:

https://github.com/apache/carbondata/pull/3019#discussion_r244097233
  
--- Diff: 
examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala
 ---
@@ -72,69 +74,107 @@ object CarbonSessionExample {
 val path = s"$rootPath/examples/spark2/src/main/resources/data.csv"
 
 // scalastyle:off
-spark.sql(
-  s"""
- | LOAD DATA LOCAL INPATH '$path'
- | INTO TABLE source
- | OPTIONS('HEADER'='true', 'COMPLEX_DELIMITER_LEVEL_1'='#')
-   """.stripMargin)
-// scalastyle:on
-
-spark.sql(
-  s"""
- | SELECT charField, stringField, intField
- | FROM source
- | WHERE stringfield = 'spark' AND decimalField > 40
-  """.stripMargin).show()
-
-spark.sql(
-  s"""
- | SELECT *
- | FROM source WHERE length(stringField) = 5
-   """.stripMargin).show()
-
-spark.sql(
-  s"""
- | SELECT *
- | FROM source WHERE date_format(dateField, "yyyy-MM-dd") = 
"2015-07-23"
-   """.stripMargin).show()
-
-spark.sql("SELECT count(stringField) FROM source").show()
-
-spark.sql(
-  s"""
- | SELECT sum(intField), stringField
- | FROM source
- | GROUP BY stringField
-   """.stripMargin).show()
-
-spark.sql(
-  s"""
- | SELECT t1.*, t2.*
- | FROM source t1, source t2
- | WHERE t1.stringField = t2.stringField
-  """.stripMargin).show()
-
-spark.sql(
-  s"""
- | WITH t1 AS (
- | SELECT * FROM source
- | UNION ALL
- | SELECT * FROM source
- | )
- | SELECT t1.*, t2.*
- | FROM t1, source t2
- | WHERE t1.stringField = t2.stringField
-  """.stripMargin).show()
-
-spark.sql(
-  s"""
- | SELECT *
- | FROM source
- | WHERE stringField = 'spark' and floatField > 2.8
-   """.stripMargin).show()
+//spark.sql(
+//  s"""
+// | LOAD DATA LOCAL INPATH '$path'
+// | INTO TABLE source
+// | OPTIONS('HEADER'='true', 'COMPLEX_DELIMITER_LEVEL_1'='#')
+//   """.stripMargin)
+//// scalastyle:on
+//
+//spark.sql(
+//  s"""
+// | CREATE TABLE source_cs(
+// | shortField SHORT,
+// | intField INT,
+// | bigintField LONG,
+// | doubleField DOUBLE,
+// | stringField STRING,
+// | timestampField TIMESTAMP,
+// | decimalField DECIMAL(18,2),
+// | dateField DATE,
+// | charField CHAR(5),
+// | floatField FLOAT
+// | )
+// | using carbon
+// | location 'file://${ExampleUtils.storeLocation}'
+//   """.stripMargin)
+//
+//spark.sql("insert into source_cs select * from source")
+//
+//spark.sql(
+//  s"""
+// | CREATE TABLE source_par(
+// | shortField SHORT,
+// | intField INT,
+// | bigintField LONG,
+// | doubleField DOUBLE,
+// | stringField STRING,
+// | timestampField TIMESTAMP,
+// | decimalField DECIMAL(18,2),
+// | dateField DATE,
+// | charField CHAR(5),
+// | floatField FLOAT
+// | )
+// | using parquet
+//   """.stripMargin)
+//
+//spark.sql("insert into source_par select * from source")
+//spark.sql(
+//  s"""
+// | SELECT charField, stringField, intField
+// | FROM source
+// | WHERE stringfield = 'spark' AND decimalField > 40
+//  """.stripMargin).show()
+//
+//spark.sql(
+//  s"""
+// | SELECT *
+// | FROM source WHERE length(stringField) = 5
+//   """.stripMargin).show()
+//
+//spark.sql(
+//  s"""
+// | SELECT *
+// | FROM source WHERE date_format(dateField, "yyyy-MM-dd") = 
"2015-07-23"
+//   """.stripMargin).show()
+//
+//spark.sql("SELECT count(stringField) FROM source").show()
+//
+//spark.sql(
+//  s"""
+// | SELECT sum(intField), stringField
+// | FROM source
+// | GROUP BY stringField
+//   """.stripMargin).show()
+//
+//spark.sql(
+//  s"""
--- End diff --

Reverted


---


[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-26 Thread ravipesala
Github user ravipesala commented on a diff in the pull request:

https://github.com/apache/carbondata/pull/3019#discussion_r244097300
  
--- Diff: 
integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataConnectorFactory.java
 ---
@@ -17,69 +17,179 @@
 
 package org.apache.carbondata.presto;
 
+import java.lang.management.ManagementFactory;
+import java.lang.reflect.*;
 import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
 
 import static java.util.Objects.requireNonNull;
 
-import com.facebook.presto.spi.ConnectorHandleResolver;
+import org.apache.carbondata.presto.impl.CarbonTableConfig;
+
+import com.facebook.presto.hive.HiveConnector;
+import com.facebook.presto.hive.HiveConnectorFactory;
+import com.facebook.presto.hive.HiveMetadataFactory;
+import com.facebook.presto.hive.HiveProcedureModule;
+import com.facebook.presto.hive.HiveSchemaProperties;
+import com.facebook.presto.hive.HiveSessionProperties;
+import com.facebook.presto.hive.HiveStorageFormat;
+import com.facebook.presto.hive.HiveTableProperties;
+import com.facebook.presto.hive.HiveTransactionManager;
+import com.facebook.presto.hive.NodeVersion;
+import com.facebook.presto.hive.RebindSafeMBeanServer;
+import com.facebook.presto.hive.authentication.HiveAuthenticationModule;
+import com.facebook.presto.hive.metastore.HiveMetastoreModule;
+import com.facebook.presto.hive.s3.HiveS3Module;
+import com.facebook.presto.hive.security.HiveSecurityModule;
+import com.facebook.presto.hive.security.PartitionsAwareAccessControl;
+import com.facebook.presto.spi.NodeManager;
+import com.facebook.presto.spi.PageIndexerFactory;
+import com.facebook.presto.spi.PageSorter;
 import com.facebook.presto.spi.classloader.ThreadContextClassLoader;
-import com.facebook.presto.spi.connector.*;
-import 
com.facebook.presto.spi.connector.classloader.ClassLoaderSafeConnectorMetadata;
+import com.facebook.presto.spi.connector.Connector;
+import com.facebook.presto.spi.connector.ConnectorAccessControl;
+import com.facebook.presto.spi.connector.ConnectorContext;
+import com.facebook.presto.spi.connector.ConnectorNodePartitioningProvider;
+import com.facebook.presto.spi.connector.ConnectorPageSinkProvider;
+import com.facebook.presto.spi.connector.ConnectorPageSourceProvider;
+import com.facebook.presto.spi.connector.ConnectorSplitManager;
+import 
com.facebook.presto.spi.connector.classloader.ClassLoaderSafeConnectorPageSinkProvider;
 import 
com.facebook.presto.spi.connector.classloader.ClassLoaderSafeConnectorPageSourceProvider;
 import 
com.facebook.presto.spi.connector.classloader.ClassLoaderSafeConnectorSplitManager;
-import com.google.common.base.Throwables;
+import 
com.facebook.presto.spi.connector.classloader.ClassLoaderSafeNodePartitioningProvider;
+import com.facebook.presto.spi.procedure.Procedure;
+import com.facebook.presto.spi.type.TypeManager;
+import com.google.common.collect.ImmutableSet;
 import com.google.inject.Injector;
+import com.google.inject.Key;
+import com.google.inject.TypeLiteral;
 import io.airlift.bootstrap.Bootstrap;
 import io.airlift.bootstrap.LifeCycleManager;
+import io.airlift.event.client.EventModule;
 import io.airlift.json.JsonModule;
+import io.airlift.units.DataSize;
+import org.weakref.jmx.guice.MBeanModule;
+import sun.reflect.ConstructorAccessor;
 
+import static com.google.common.base.Throwables.throwIfUnchecked;
+import static io.airlift.configuration.ConfigBinder.configBinder;
 
 /**
  * Build Carbondata Connector
  * It will be called by CarbondataPlugin
  */
-public class CarbondataConnectorFactory implements ConnectorFactory {
+public class CarbondataConnectorFactory extends HiveConnectorFactory {
 
   private final String name;
--- End diff --

removed


---


[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-26 Thread ravipesala
Github user ravipesala commented on a diff in the pull request:

https://github.com/apache/carbondata/pull/3019#discussion_r244097200
  
--- Diff: 
integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataModule.java
 ---
@@ -17,62 +17,150 @@
 
 package org.apache.carbondata.presto;
 
-import javax.inject.Inject;
+import java.util.function.Supplier;
 
 import static java.util.Objects.requireNonNull;
 
-import org.apache.carbondata.presto.impl.CarbonTableConfig;
 import org.apache.carbondata.presto.impl.CarbonTableReader;
 
+import com.facebook.presto.hive.CoercionPolicy;
+import com.facebook.presto.hive.DirectoryLister;
+import com.facebook.presto.hive.FileFormatDataSourceStats;
+import com.facebook.presto.hive.GenericHiveRecordCursorProvider;
+import com.facebook.presto.hive.HadoopDirectoryLister;
+import com.facebook.presto.hive.HdfsConfiguration;
+import com.facebook.presto.hive.HdfsConfigurationUpdater;
+import com.facebook.presto.hive.HdfsEnvironment;
+import com.facebook.presto.hive.HiveClientConfig;
+import com.facebook.presto.hive.HiveClientModule;
+import com.facebook.presto.hive.HiveCoercionPolicy;
+import com.facebook.presto.hive.HiveConnectorId;
+import com.facebook.presto.hive.HiveEventClient;
+import com.facebook.presto.hive.HiveFileWriterFactory;
+import com.facebook.presto.hive.HiveHdfsConfiguration;
+import com.facebook.presto.hive.HiveLocationService;
+import com.facebook.presto.hive.HiveMetadataFactory;
+import com.facebook.presto.hive.HiveNodePartitioningProvider;
+import com.facebook.presto.hive.HivePageSinkProvider;
+import com.facebook.presto.hive.HivePageSourceFactory;
+import com.facebook.presto.hive.HivePartitionManager;
+import com.facebook.presto.hive.HiveRecordCursorProvider;
+import com.facebook.presto.hive.HiveSessionProperties;
+import com.facebook.presto.hive.HiveSplitManager;
+import com.facebook.presto.hive.HiveTableProperties;
+import com.facebook.presto.hive.HiveTransactionManager;
+import com.facebook.presto.hive.HiveTypeTranslator;
+import com.facebook.presto.hive.HiveWriterStats;
+import com.facebook.presto.hive.LocationService;
+import com.facebook.presto.hive.NamenodeStats;
+import com.facebook.presto.hive.OrcFileWriterConfig;
+import com.facebook.presto.hive.OrcFileWriterFactory;
+import com.facebook.presto.hive.PartitionUpdate;
+import com.facebook.presto.hive.RcFileFileWriterFactory;
+import com.facebook.presto.hive.TableParameterCodec;
+import com.facebook.presto.hive.TransactionalMetadata;
+import com.facebook.presto.hive.TypeTranslator;
+import com.facebook.presto.hive.orc.DwrfPageSourceFactory;
+import com.facebook.presto.hive.orc.OrcPageSourceFactory;
+import com.facebook.presto.hive.parquet.ParquetPageSourceFactory;
+import com.facebook.presto.hive.parquet.ParquetRecordCursorProvider;
+import com.facebook.presto.hive.rcfile.RcFilePageSourceFactory;
+import com.facebook.presto.spi.connector.ConnectorNodePartitioningProvider;
+import com.facebook.presto.spi.connector.ConnectorPageSinkProvider;
 import com.facebook.presto.spi.connector.ConnectorPageSourceProvider;
 import com.facebook.presto.spi.connector.ConnectorSplitManager;
-import com.facebook.presto.spi.type.Type;
-import com.facebook.presto.spi.type.TypeManager;
-import com.fasterxml.jackson.databind.DeserializationContext;
-import com.fasterxml.jackson.databind.deser.std.FromStringDeserializer;
 import com.google.inject.Binder;
-import com.google.inject.Module;
 import com.google.inject.Scopes;
+import com.google.inject.TypeLiteral;
+import com.google.inject.multibindings.Multibinder;
+import io.airlift.event.client.EventClient;
 
-import static 
com.facebook.presto.spi.type.TypeSignature.parseTypeSignature;
-import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.inject.multibindings.Multibinder.newSetBinder;
 import static io.airlift.configuration.ConfigBinder.configBinder;
+import static io.airlift.json.JsonCodecBinder.jsonCodecBinder;
 
-public class CarbondataModule implements Module {
+import static org.weakref.jmx.ObjectNames.generatedNameOf;
+import static org.weakref.jmx.guice.ExportBinder.newExporter;
+
+public class CarbondataModule extends HiveClientModule {
 
   private final String connectorId;
-  private final TypeManager typeManager;
 
-  public CarbondataModule(String connectorId, TypeManager typeManager) {
+  public CarbondataModule(String connectorId) {
+super(connectorId);
 this.connectorId = requireNonNull(connectorId, "connector id is null");
-this.typeManager = requireNonNull(typeManager, "typeManager is null");
   }
 
   @Override public void 

[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-26 Thread ravipesala
Github user ravipesala commented on a diff in the pull request:

https://github.com/apache/carbondata/pull/3019#discussion_r244097091
  
--- Diff: 
integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataPageSourceProvider.java
 ---
@@ -113,7 +132,7 @@ private PrestoCarbonVectorizedRecordReader 
createReader(ConnectorSplit split,
   PrestoCarbonVectorizedRecordReader reader =
   new PrestoCarbonVectorizedRecordReader(queryExecutor, queryModel,
   (AbstractDetailQueryResultIterator) iterator, readSupport);
-  reader.setTaskId(carbondataSplit.getIndex());
+  
reader.setTaskId(Long.parseLong(carbonSplit.getSchema().getProperty("index")));
--- End diff --

ok, generalized


---


[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-26 Thread ravipesala
Github user ravipesala commented on a diff in the pull request:

https://github.com/apache/carbondata/pull/3019#discussion_r244097070
  
--- Diff: 
integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataPageSourceProvider.java
 ---
@@ -43,63 +44,81 @@
 
 import static org.apache.carbondata.presto.Types.checkType;
 
+import com.facebook.presto.hive.HdfsEnvironment;
+import com.facebook.presto.hive.HiveClientConfig;
+import com.facebook.presto.hive.HiveColumnHandle;
+import com.facebook.presto.hive.HivePageSourceFactory;
+import com.facebook.presto.hive.HivePageSourceProvider;
+import com.facebook.presto.hive.HiveRecordCursorProvider;
+import com.facebook.presto.hive.HiveSplit;
 import com.facebook.presto.spi.ColumnHandle;
 import com.facebook.presto.spi.ConnectorPageSource;
 import com.facebook.presto.spi.ConnectorSession;
 import com.facebook.presto.spi.ConnectorSplit;
-import com.facebook.presto.spi.connector.ConnectorPageSourceProvider;
+import com.facebook.presto.spi.SchemaTableName;
 import com.facebook.presto.spi.connector.ConnectorTransactionHandle;
+import com.facebook.presto.spi.type.TypeManager;
 import com.google.common.collect.ImmutableList;
 import com.google.inject.Inject;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.TaskAttemptContextImpl;
 import org.apache.hadoop.mapred.TaskAttemptID;
 import org.apache.hadoop.mapreduce.TaskType;
 
-import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;
 
-
 /**
  * Provider Class for Carbondata Page Source class.
  */
-public class CarbondataPageSourceProvider implements 
ConnectorPageSourceProvider {
+public class CarbondataPageSourceProvider extends HivePageSourceProvider {
 
-  private String connectorId;
   private CarbonTableReader carbonTableReader;
   private String queryId ;
-
-  @Inject public CarbondataPageSourceProvider(CarbondataConnectorId 
connectorId,
+  private HdfsEnvironment hdfsEnvironment;
+
+  @Inject public CarbondataPageSourceProvider(
+  HiveClientConfig hiveClientConfig,
+  HdfsEnvironment hdfsEnvironment,
+  Set<HiveRecordCursorProvider> cursorProviders,
+  Set<HivePageSourceFactory> pageSourceFactories,
+  TypeManager typeManager,
   CarbonTableReader carbonTableReader) {
-this.connectorId = requireNonNull(connectorId, "connectorId is 
null").toString();
+super(hiveClientConfig, hdfsEnvironment, cursorProviders, 
pageSourceFactories, typeManager);
 this.carbonTableReader = requireNonNull(carbonTableReader, 
"carbonTableReader is null");
+this.hdfsEnvironment = hdfsEnvironment;
   }
 
   @Override
   public ConnectorPageSource createPageSource(ConnectorTransactionHandle 
transactionHandle,
   ConnectorSession session, ConnectorSplit split, List 
columns) {
-this.queryId = ((CarbondataSplit)split).getQueryId();
+HiveSplit carbonSplit =
+checkType(split, HiveSplit.class, "split is not class HiveSplit");
+if (carbonSplit.getSchema().getProperty("queryId") == null) {
+  return super.createPageSource(transactionHandle, session, split, 
columns);
+}
+this.queryId = carbonSplit.getSchema().getProperty("queryId");
+Configuration configuration = this.hdfsEnvironment.getConfiguration(
+new HdfsEnvironment.HdfsContext(session, 
carbonSplit.getDatabase(), carbonSplit.getTable()),
+new Path(carbonSplit.getSchema().getProperty("tablePath")));
+configuration = carbonTableReader.updateS3Properties(configuration);
 CarbonDictionaryDecodeReadSupport readSupport = new 
CarbonDictionaryDecodeReadSupport();
 PrestoCarbonVectorizedRecordReader carbonRecordReader =
-createReader(split, columns, readSupport);
+createReader(carbonSplit, columns, readSupport, configuration);
 return new CarbondataPageSource(carbonRecordReader, columns);
   }
 
   /**
-   * @param split
+   * @param carbonSplit
* @param columns
* @param readSupport
--- End diff --

ok


---


[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-26 Thread ravipesala
Github user ravipesala commented on a diff in the pull request:

https://github.com/apache/carbondata/pull/3019#discussion_r244097115
  
--- Diff: 
examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala
 ---
@@ -72,69 +74,107 @@ object CarbonSessionExample {
 val path = s"$rootPath/examples/spark2/src/main/resources/data.csv"
 
 // scalastyle:off
-spark.sql(
-  s"""
- | LOAD DATA LOCAL INPATH '$path'
- | INTO TABLE source
- | OPTIONS('HEADER'='true', 'COMPLEX_DELIMITER_LEVEL_1'='#')
-   """.stripMargin)
-// scalastyle:on
-
-spark.sql(
-  s"""
- | SELECT charField, stringField, intField
- | FROM source
- | WHERE stringfield = 'spark' AND decimalField > 40
-  """.stripMargin).show()
-
-spark.sql(
-  s"""
- | SELECT *
- | FROM source WHERE length(stringField) = 5
-   """.stripMargin).show()
-
-spark.sql(
-  s"""
- | SELECT *
- | FROM source WHERE date_format(dateField, "yyyy-MM-dd") = 
"2015-07-23"
-   """.stripMargin).show()
-
-spark.sql("SELECT count(stringField) FROM source").show()
-
-spark.sql(
-  s"""
- | SELECT sum(intField), stringField
- | FROM source
- | GROUP BY stringField
-   """.stripMargin).show()
-
-spark.sql(
-  s"""
- | SELECT t1.*, t2.*
- | FROM source t1, source t2
- | WHERE t1.stringField = t2.stringField
-  """.stripMargin).show()
-
-spark.sql(
-  s"""
- | WITH t1 AS (
- | SELECT * FROM source
- | UNION ALL
- | SELECT * FROM source
- | )
- | SELECT t1.*, t2.*
- | FROM t1, source t2
- | WHERE t1.stringField = t2.stringField
-  """.stripMargin).show()
-
-spark.sql(
-  s"""
- | SELECT *
- | FROM source
- | WHERE stringField = 'spark' and floatField > 2.8
-   """.stripMargin).show()
+//spark.sql(
--- End diff --

done


---


[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-26 Thread kumarvishal09
Github user kumarvishal09 commented on a diff in the pull request:

https://github.com/apache/carbondata/pull/3019#discussion_r244090651
  
--- Diff: 
integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataConnectorFactory.java
 ---
@@ -17,69 +17,179 @@
 
 package org.apache.carbondata.presto;
 
+import java.lang.management.ManagementFactory;
+import java.lang.reflect.*;
 import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
 
 import static java.util.Objects.requireNonNull;
 
-import com.facebook.presto.spi.ConnectorHandleResolver;
+import org.apache.carbondata.presto.impl.CarbonTableConfig;
+
+import com.facebook.presto.hive.HiveConnector;
+import com.facebook.presto.hive.HiveConnectorFactory;
+import com.facebook.presto.hive.HiveMetadataFactory;
+import com.facebook.presto.hive.HiveProcedureModule;
+import com.facebook.presto.hive.HiveSchemaProperties;
+import com.facebook.presto.hive.HiveSessionProperties;
+import com.facebook.presto.hive.HiveStorageFormat;
+import com.facebook.presto.hive.HiveTableProperties;
+import com.facebook.presto.hive.HiveTransactionManager;
+import com.facebook.presto.hive.NodeVersion;
+import com.facebook.presto.hive.RebindSafeMBeanServer;
+import com.facebook.presto.hive.authentication.HiveAuthenticationModule;
+import com.facebook.presto.hive.metastore.HiveMetastoreModule;
+import com.facebook.presto.hive.s3.HiveS3Module;
+import com.facebook.presto.hive.security.HiveSecurityModule;
+import com.facebook.presto.hive.security.PartitionsAwareAccessControl;
+import com.facebook.presto.spi.NodeManager;
+import com.facebook.presto.spi.PageIndexerFactory;
+import com.facebook.presto.spi.PageSorter;
 import com.facebook.presto.spi.classloader.ThreadContextClassLoader;
-import com.facebook.presto.spi.connector.*;
-import 
com.facebook.presto.spi.connector.classloader.ClassLoaderSafeConnectorMetadata;
+import com.facebook.presto.spi.connector.Connector;
+import com.facebook.presto.spi.connector.ConnectorAccessControl;
+import com.facebook.presto.spi.connector.ConnectorContext;
+import com.facebook.presto.spi.connector.ConnectorNodePartitioningProvider;
+import com.facebook.presto.spi.connector.ConnectorPageSinkProvider;
+import com.facebook.presto.spi.connector.ConnectorPageSourceProvider;
+import com.facebook.presto.spi.connector.ConnectorSplitManager;
+import 
com.facebook.presto.spi.connector.classloader.ClassLoaderSafeConnectorPageSinkProvider;
 import 
com.facebook.presto.spi.connector.classloader.ClassLoaderSafeConnectorPageSourceProvider;
 import 
com.facebook.presto.spi.connector.classloader.ClassLoaderSafeConnectorSplitManager;
-import com.google.common.base.Throwables;
+import 
com.facebook.presto.spi.connector.classloader.ClassLoaderSafeNodePartitioningProvider;
+import com.facebook.presto.spi.procedure.Procedure;
+import com.facebook.presto.spi.type.TypeManager;
+import com.google.common.collect.ImmutableSet;
 import com.google.inject.Injector;
+import com.google.inject.Key;
+import com.google.inject.TypeLiteral;
 import io.airlift.bootstrap.Bootstrap;
 import io.airlift.bootstrap.LifeCycleManager;
+import io.airlift.event.client.EventModule;
 import io.airlift.json.JsonModule;
+import io.airlift.units.DataSize;
+import org.weakref.jmx.guice.MBeanModule;
+import sun.reflect.ConstructorAccessor;
 
+import static com.google.common.base.Throwables.throwIfUnchecked;
+import static io.airlift.configuration.ConfigBinder.configBinder;
 
 /**
  * Build Carbondata Connector
  * It will be called by CarbondataPlugin
  */
-public class CarbondataConnectorFactory implements ConnectorFactory {
+public class CarbondataConnectorFactory extends HiveConnectorFactory {
 
   private final String name;
--- End diff --

name is not used, please remove 


---


[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-26 Thread chenliang613
Github user chenliang613 commented on a diff in the pull request:

https://github.com/apache/carbondata/pull/3019#discussion_r244003542
  
--- Diff: 
examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala
 ---
@@ -72,69 +74,107 @@ object CarbonSessionExample {
 val path = s"$rootPath/examples/spark2/src/main/resources/data.csv"
 
 // scalastyle:off
-spark.sql(
-  s"""
- | LOAD DATA LOCAL INPATH '$path'
- | INTO TABLE source
- | OPTIONS('HEADER'='true', 'COMPLEX_DELIMITER_LEVEL_1'='#')
-   """.stripMargin)
-// scalastyle:on
-
-spark.sql(
-  s"""
- | SELECT charField, stringField, intField
- | FROM source
- | WHERE stringfield = 'spark' AND decimalField > 40
-  """.stripMargin).show()
-
-spark.sql(
-  s"""
- | SELECT *
- | FROM source WHERE length(stringField) = 5
-   """.stripMargin).show()
-
-spark.sql(
-  s"""
- | SELECT *
- | FROM source WHERE date_format(dateField, "yyyy-MM-dd") = 
"2015-07-23"
-   """.stripMargin).show()
-
-spark.sql("SELECT count(stringField) FROM source").show()
-
-spark.sql(
-  s"""
- | SELECT sum(intField), stringField
- | FROM source
- | GROUP BY stringField
-   """.stripMargin).show()
-
-spark.sql(
-  s"""
- | SELECT t1.*, t2.*
- | FROM source t1, source t2
- | WHERE t1.stringField = t2.stringField
-  """.stripMargin).show()
-
-spark.sql(
-  s"""
- | WITH t1 AS (
- | SELECT * FROM source
- | UNION ALL
- | SELECT * FROM source
- | )
- | SELECT t1.*, t2.*
- | FROM t1, source t2
- | WHERE t1.stringField = t2.stringField
-  """.stripMargin).show()
-
-spark.sql(
-  s"""
- | SELECT *
- | FROM source
- | WHERE stringField = 'spark' and floatField > 2.8
-   """.stripMargin).show()
+//spark.sql(
+//  s"""
+// | LOAD DATA LOCAL INPATH '$path'
+// | INTO TABLE source
+// | OPTIONS('HEADER'='true', 'COMPLEX_DELIMITER_LEVEL_1'='#')
+//   """.stripMargin)
+//// scalastyle:on
+//
+//spark.sql(
+//  s"""
+// | CREATE TABLE source_cs(
+// | shortField SHORT,
+// | intField INT,
+// | bigintField LONG,
+// | doubleField DOUBLE,
+// | stringField STRING,
+// | timestampField TIMESTAMP,
+// | decimalField DECIMAL(18,2),
+// | dateField DATE,
+// | charField CHAR(5),
+// | floatField FLOAT
+// | )
+// | using carbon
+// | location 'file://${ExampleUtils.storeLocation}'
+//   """.stripMargin)
+//
+//spark.sql("insert into source_cs select * from source")
+//
+//spark.sql(
+//  s"""
+// | CREATE TABLE source_par(
+// | shortField SHORT,
+// | intField INT,
+// | bigintField LONG,
+// | doubleField DOUBLE,
+// | stringField STRING,
+// | timestampField TIMESTAMP,
+// | decimalField DECIMAL(18,2),
+// | dateField DATE,
+// | charField CHAR(5),
+// | floatField FLOAT
+// | )
+// | using parquet
+//   """.stripMargin)
+//
+//spark.sql("insert into source_par select * from source")
+//spark.sql(
+//  s"""
+// | SELECT charField, stringField, intField
+// | FROM source
+// | WHERE stringfield = 'spark' AND decimalField > 40
+//  """.stripMargin).show()
+//
+//spark.sql(
+//  s"""
+// | SELECT *
+// | FROM source WHERE length(stringField) = 5
+//   """.stripMargin).show()
+//
+//spark.sql(
+//  s"""
+// | SELECT *
+// | FROM source WHERE date_format(dateField, "yyyy-MM-dd") = 
"2015-07-23"
+//   """.stripMargin).show()
+//
+//spark.sql("SELECT count(stringField) FROM source").show()
+//
+//spark.sql(
+//  s"""
+// | SELECT sum(intField), stringField
+// | FROM source
+// | GROUP BY stringField
+//   """.stripMargin).show()
+//
+//spark.sql(
+//  s"""
--- End diff --

why disable all these code?


---


[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-24 Thread ajantha-bhat
Github user ajantha-bhat commented on a diff in the pull request:

https://github.com/apache/carbondata/pull/3019#discussion_r243849816
  
--- Diff: 
integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataModule.java
 ---
@@ -17,62 +17,150 @@
 
 package org.apache.carbondata.presto;
 
-import javax.inject.Inject;
+import java.util.function.Supplier;
 
 import static java.util.Objects.requireNonNull;
 
-import org.apache.carbondata.presto.impl.CarbonTableConfig;
 import org.apache.carbondata.presto.impl.CarbonTableReader;
 
+import com.facebook.presto.hive.CoercionPolicy;
+import com.facebook.presto.hive.DirectoryLister;
+import com.facebook.presto.hive.FileFormatDataSourceStats;
+import com.facebook.presto.hive.GenericHiveRecordCursorProvider;
+import com.facebook.presto.hive.HadoopDirectoryLister;
+import com.facebook.presto.hive.HdfsConfiguration;
+import com.facebook.presto.hive.HdfsConfigurationUpdater;
+import com.facebook.presto.hive.HdfsEnvironment;
+import com.facebook.presto.hive.HiveClientConfig;
+import com.facebook.presto.hive.HiveClientModule;
+import com.facebook.presto.hive.HiveCoercionPolicy;
+import com.facebook.presto.hive.HiveConnectorId;
+import com.facebook.presto.hive.HiveEventClient;
+import com.facebook.presto.hive.HiveFileWriterFactory;
+import com.facebook.presto.hive.HiveHdfsConfiguration;
+import com.facebook.presto.hive.HiveLocationService;
+import com.facebook.presto.hive.HiveMetadataFactory;
+import com.facebook.presto.hive.HiveNodePartitioningProvider;
+import com.facebook.presto.hive.HivePageSinkProvider;
+import com.facebook.presto.hive.HivePageSourceFactory;
+import com.facebook.presto.hive.HivePartitionManager;
+import com.facebook.presto.hive.HiveRecordCursorProvider;
+import com.facebook.presto.hive.HiveSessionProperties;
+import com.facebook.presto.hive.HiveSplitManager;
+import com.facebook.presto.hive.HiveTableProperties;
+import com.facebook.presto.hive.HiveTransactionManager;
+import com.facebook.presto.hive.HiveTypeTranslator;
+import com.facebook.presto.hive.HiveWriterStats;
+import com.facebook.presto.hive.LocationService;
+import com.facebook.presto.hive.NamenodeStats;
+import com.facebook.presto.hive.OrcFileWriterConfig;
+import com.facebook.presto.hive.OrcFileWriterFactory;
+import com.facebook.presto.hive.PartitionUpdate;
+import com.facebook.presto.hive.RcFileFileWriterFactory;
+import com.facebook.presto.hive.TableParameterCodec;
+import com.facebook.presto.hive.TransactionalMetadata;
+import com.facebook.presto.hive.TypeTranslator;
+import com.facebook.presto.hive.orc.DwrfPageSourceFactory;
+import com.facebook.presto.hive.orc.OrcPageSourceFactory;
+import com.facebook.presto.hive.parquet.ParquetPageSourceFactory;
+import com.facebook.presto.hive.parquet.ParquetRecordCursorProvider;
+import com.facebook.presto.hive.rcfile.RcFilePageSourceFactory;
+import com.facebook.presto.spi.connector.ConnectorNodePartitioningProvider;
+import com.facebook.presto.spi.connector.ConnectorPageSinkProvider;
 import com.facebook.presto.spi.connector.ConnectorPageSourceProvider;
 import com.facebook.presto.spi.connector.ConnectorSplitManager;
-import com.facebook.presto.spi.type.Type;
-import com.facebook.presto.spi.type.TypeManager;
-import com.fasterxml.jackson.databind.DeserializationContext;
-import com.fasterxml.jackson.databind.deser.std.FromStringDeserializer;
 import com.google.inject.Binder;
-import com.google.inject.Module;
 import com.google.inject.Scopes;
+import com.google.inject.TypeLiteral;
+import com.google.inject.multibindings.Multibinder;
+import io.airlift.event.client.EventClient;
 
-import static 
com.facebook.presto.spi.type.TypeSignature.parseTypeSignature;
-import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.inject.multibindings.Multibinder.newSetBinder;
 import static io.airlift.configuration.ConfigBinder.configBinder;
+import static io.airlift.json.JsonCodecBinder.jsonCodecBinder;
 
-public class CarbondataModule implements Module {
+import static org.weakref.jmx.ObjectNames.generatedNameOf;
+import static org.weakref.jmx.guice.ExportBinder.newExporter;
+
+public class CarbondataModule extends HiveClientModule {
 
   private final String connectorId;
-  private final TypeManager typeManager;
 
-  public CarbondataModule(String connectorId, TypeManager typeManager) {
+  public CarbondataModule(String connectorId) {
+super(connectorId);
 this.connectorId = requireNonNull(connectorId, "connector id is null");
-this.typeManager = requireNonNull(typeManager, "typeManager is null");
   }
 
   @Override public void 

[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-24 Thread qiuchenjian
Github user qiuchenjian commented on a diff in the pull request:

https://github.com/apache/carbondata/pull/3019#discussion_r243822523
  
--- Diff: 
integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataPageSourceProvider.java
 ---
@@ -113,7 +132,7 @@ private PrestoCarbonVectorizedRecordReader 
createReader(ConnectorSplit split,
   PrestoCarbonVectorizedRecordReader reader =
   new PrestoCarbonVectorizedRecordReader(queryExecutor, queryModel,
   (AbstractDetailQueryResultIterator) iterator, readSupport);
-  reader.setTaskId(carbondataSplit.getIndex());
+  
reader.setTaskId(Long.parseLong(carbonSplit.getSchema().getProperty("index")));
--- End diff --

I think it's better to catch the NumberFormatException 


---


[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-24 Thread ajantha-bhat
Github user ajantha-bhat commented on a diff in the pull request:

https://github.com/apache/carbondata/pull/3019#discussion_r243821888
  
--- Diff: 
examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala
 ---
@@ -72,69 +74,107 @@ object CarbonSessionExample {
 val path = s"$rootPath/examples/spark2/src/main/resources/data.csv"
 
 // scalastyle:off
-spark.sql(
-  s"""
- | LOAD DATA LOCAL INPATH '$path'
- | INTO TABLE source
- | OPTIONS('HEADER'='true', 'COMPLEX_DELIMITER_LEVEL_1'='#')
-   """.stripMargin)
-// scalastyle:on
-
-spark.sql(
-  s"""
- | SELECT charField, stringField, intField
- | FROM source
- | WHERE stringfield = 'spark' AND decimalField > 40
-  """.stripMargin).show()
-
-spark.sql(
-  s"""
- | SELECT *
- | FROM source WHERE length(stringField) = 5
-   """.stripMargin).show()
-
-spark.sql(
-  s"""
- | SELECT *
- | FROM source WHERE date_format(dateField, "-MM-dd") = 
"2015-07-23"
-   """.stripMargin).show()
-
-spark.sql("SELECT count(stringField) FROM source").show()
-
-spark.sql(
-  s"""
- | SELECT sum(intField), stringField
- | FROM source
- | GROUP BY stringField
-   """.stripMargin).show()
-
-spark.sql(
-  s"""
- | SELECT t1.*, t2.*
- | FROM source t1, source t2
- | WHERE t1.stringField = t2.stringField
-  """.stripMargin).show()
-
-spark.sql(
-  s"""
- | WITH t1 AS (
- | SELECT * FROM source
- | UNION ALL
- | SELECT * FROM source
- | )
- | SELECT t1.*, t2.*
- | FROM t1, source t2
- | WHERE t1.stringField = t2.stringField
-  """.stripMargin).show()
-
-spark.sql(
-  s"""
- | SELECT *
- | FROM source
- | WHERE stringField = 'spark' and floatField > 2.8
-   """.stripMargin).show()
+//spark.sql(
--- End diff --

please revert the changes in this file


---


[GitHub] carbondata pull request #3019: [CARBONDATA-3194] Integrating Carbon with Pre...

2018-12-24 Thread qiuchenjian
Github user qiuchenjian commented on a diff in the pull request:

https://github.com/apache/carbondata/pull/3019#discussion_r243821704
  
--- Diff: 
integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataPageSourceProvider.java
 ---
@@ -43,63 +44,81 @@
 
 import static org.apache.carbondata.presto.Types.checkType;
 
+import com.facebook.presto.hive.HdfsEnvironment;
+import com.facebook.presto.hive.HiveClientConfig;
+import com.facebook.presto.hive.HiveColumnHandle;
+import com.facebook.presto.hive.HivePageSourceFactory;
+import com.facebook.presto.hive.HivePageSourceProvider;
+import com.facebook.presto.hive.HiveRecordCursorProvider;
+import com.facebook.presto.hive.HiveSplit;
 import com.facebook.presto.spi.ColumnHandle;
 import com.facebook.presto.spi.ConnectorPageSource;
 import com.facebook.presto.spi.ConnectorSession;
 import com.facebook.presto.spi.ConnectorSplit;
-import com.facebook.presto.spi.connector.ConnectorPageSourceProvider;
+import com.facebook.presto.spi.SchemaTableName;
 import com.facebook.presto.spi.connector.ConnectorTransactionHandle;
+import com.facebook.presto.spi.type.TypeManager;
 import com.google.common.collect.ImmutableList;
 import com.google.inject.Inject;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.TaskAttemptContextImpl;
 import org.apache.hadoop.mapred.TaskAttemptID;
 import org.apache.hadoop.mapreduce.TaskType;
 
-import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;
 
-
 /**
  * Provider Class for Carbondata Page Source class.
  */
-public class CarbondataPageSourceProvider implements 
ConnectorPageSourceProvider {
+public class CarbondataPageSourceProvider extends HivePageSourceProvider {
 
-  private String connectorId;
   private CarbonTableReader carbonTableReader;
   private String queryId ;
-
-  @Inject public CarbondataPageSourceProvider(CarbondataConnectorId 
connectorId,
+  private HdfsEnvironment hdfsEnvironment;
+
+  @Inject public CarbondataPageSourceProvider(
+  HiveClientConfig hiveClientConfig,
+  HdfsEnvironment hdfsEnvironment,
+  Set cursorProviders,
+  Set pageSourceFactories,
+  TypeManager typeManager,
   CarbonTableReader carbonTableReader) {
-this.connectorId = requireNonNull(connectorId, "connectorId is 
null").toString();
+super(hiveClientConfig, hdfsEnvironment, cursorProviders, 
pageSourceFactories, typeManager);
 this.carbonTableReader = requireNonNull(carbonTableReader, 
"carbonTableReader is null");
+this.hdfsEnvironment = hdfsEnvironment;
   }
 
   @Override
   public ConnectorPageSource createPageSource(ConnectorTransactionHandle 
transactionHandle,
   ConnectorSession session, ConnectorSplit split, List 
columns) {
-this.queryId = ((CarbondataSplit)split).getQueryId();
+HiveSplit carbonSplit =
+checkType(split, HiveSplit.class, "split is not class HiveSplit");
+if (carbonSplit.getSchema().getProperty("queryId") == null) {
+  return super.createPageSource(transactionHandle, session, split, 
columns);
+}
+this.queryId = carbonSplit.getSchema().getProperty("queryId");
+Configuration configuration = this.hdfsEnvironment.getConfiguration(
+new HdfsEnvironment.HdfsContext(session, 
carbonSplit.getDatabase(), carbonSplit.getTable()),
+new Path(carbonSplit.getSchema().getProperty("tablePath")));
+configuration = carbonTableReader.updateS3Properties(configuration);
 CarbonDictionaryDecodeReadSupport readSupport = new 
CarbonDictionaryDecodeReadSupport();
 PrestoCarbonVectorizedRecordReader carbonRecordReader =
-createReader(split, columns, readSupport);
+createReader(carbonSplit, columns, readSupport, configuration);
 return new CarbondataPageSource(carbonRecordReader, columns);
   }
 
   /**
-   * @param split
+   * @param carbonSplit
* @param columns
* @param readSupport
--- End diff --

missing @param tag for the new configuration parameter


---