This is an automated email from the ASF dual-hosted git repository.

shaofengshi pushed a commit to branch engine-flink
in repository https://gitbox.apache.org/repos/asf/kylin.git


The following commit(s) were added to refs/heads/engine-flink by this push:
     new e1c1d7c  Less array copy
e1c1d7c is described below

commit e1c1d7cda53631baa4ed9c4d081a2ea45ff0107b
Author: shaofengshi <shaofeng...@apache.org>
AuthorDate: Sun Mar 31 10:40:05 2019 +0800

    Less array copy
---
 .../main/java/org/apache/kylin/engine/flink/FlinkCubingByLayer.java | 2 +-
 .../main/java/org/apache/kylin/engine/flink/FlinkCubingMerge.java   | 6 +++---
 .../src/main/java/org/apache/kylin/engine/flink/FlinkUtil.java      | 2 --
 .../main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java | 2 +-
 4 files changed, 5 insertions(+), 7 deletions(-)

diff --git a/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkCubingByLayer.java b/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkCubingByLayer.java
index ba1f233..b8ddf95 100644
--- a/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkCubingByLayer.java
+++ b/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkCubingByLayer.java
@@ -70,7 +70,7 @@ import java.util.List;
 import java.util.Locale;
 
 /**
- * Flink application to build cube with the "by-layer" algorithm. Only support source data from Hive; Metadata in HBase.
+ * Flink application to build cube with the "by-layer" algorithm.
  */
 public class FlinkCubingByLayer extends AbstractApplication implements Serializable {
 
diff --git a/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkCubingMerge.java b/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkCubingMerge.java
index c51d69c..fcf8d6c 100644
--- a/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkCubingMerge.java
+++ b/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkCubingMerge.java
@@ -299,9 +299,9 @@ public class FlinkCubingMerge extends AbstractApplication implements Serializabl
         @Override
         public Tuple2<Text, Text> map(Tuple2<Text, Object[]> tuple2) throws Exception {
             ByteBuffer valueBuf = codec.encode(tuple2.f1);
-            byte[] encodedBytes = new byte[valueBuf.position()];
-            System.arraycopy(valueBuf.array(), 0, encodedBytes, 0, valueBuf.position());
-            return new Tuple2<>(tuple2.f0, new Text(encodedBytes));
+            Text result = new Text();
+            result.set(valueBuf.array(), 0, valueBuf.position());
+            return new Tuple2<>(tuple2.f0, result);
         }
 
     }
diff --git a/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkUtil.java b/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkUtil.java
index 4473a44..e7b1a49 100644
--- a/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkUtil.java
+++ b/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkUtil.java
@@ -140,8 +140,6 @@ public class FlinkUtil {
                         @Override
                         public String[] map(Tuple2<BytesWritable, Text> tuple2) throws Exception {
 
-                            System.out.println("read records from hive.");
-
                             String s = Bytes.toString(tuple2.f1.getBytes(), 0, tuple2.f1.getLength());
                             return s.split(BatchConstants.SEQUENCE_FILE_DEFAULT_DELIMITER);
                         }
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
index f3b0a13..7e27956 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
@@ -73,7 +73,7 @@ import org.slf4j.LoggerFactory;
 import scala.Tuple2;
 
 /**
- * Spark application to build cube with the "by-layer" algorithm. Only support source data from Hive; Metadata in HBase.
+ * Spark application to build cube with the "by-layer" algorithm.
  */
 public class SparkCubingByLayer extends AbstractApplication implements Serializable {
 

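For context on the change in FlinkCubingMerge above: the old code copied the encoded bytes into a temporary array with System.arraycopy, and the Text(byte[]) constructor then copied them a second time into Text's internal buffer; Text.set(byte[], int, int) reads directly from the codec's backing array, leaving a single copy. A minimal side-by-side sketch of the two variants (the helper class and method names below are hypothetical, for illustration only):

import java.nio.ByteBuffer;

import org.apache.hadoop.io.Text;

// Hypothetical helper contrasting the old and new ways of turning an
// encoded ByteBuffer into a hadoop Text value.
public class TextCopySketch {

    // Old approach: copy the encoded bytes into a temporary array, then let
    // the Text(byte[]) constructor copy them again into Text's internal buffer.
    static Text twoCopies(ByteBuffer valueBuf) {
        byte[] encodedBytes = new byte[valueBuf.position()];
        System.arraycopy(valueBuf.array(), 0, encodedBytes, 0, valueBuf.position());
        return new Text(encodedBytes);
    }

    // New approach: Text.set(byte[], start, len) reads straight from the
    // codec's backing array, so the only remaining copy is the one into
    // Text's internal buffer.
    static Text oneCopy(ByteBuffer valueBuf) {
        Text result = new Text();
        result.set(valueBuf.array(), 0, valueBuf.position());
        return result;
    }
}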