[ https://issues.apache.org/jira/browse/FLINK-2168?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15838815#comment-15838815 ]
ASF GitHub Bot commented on FLINK-2168: --------------------------------------- Github user tonycox commented on a diff in the pull request: https://github.com/apache/flink/pull/3149#discussion_r97903962 --- Diff: flink-connectors/flink-hbase/src/test/java/org/apache/flink/addons/hbase/example/HBaseTableSourceITCase.java --- @@ -0,0 +1,248 @@ +/* + * Copyright The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.flink.addons.hbase.example; + +import org.apache.flink.addons.hbase.HBaseTableSchema; +import org.apache.flink.addons.hbase.HBaseTableSource; +import org.apache.flink.addons.hbase.HBaseTestingClusterAutostarter; +import org.apache.flink.api.common.functions.MapFunction; +import org.apache.flink.api.java.DataSet; +import org.apache.flink.api.java.ExecutionEnvironment; +import org.apache.flink.api.java.tuple.Tuple; +import org.apache.flink.table.api.Table; +import org.apache.flink.table.api.TableConfig; +import org.apache.flink.table.api.TableEnvironment; +import org.apache.flink.table.api.java.BatchTableEnvironment; +import org.apache.flink.table.sources.BatchTableSource; +import org.apache.flink.types.Row; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.util.Bytes; +import org.junit.BeforeClass; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import static org.junit.Assert.assertEquals; + +public class HBaseTableSourceITCase extends HBaseTestingClusterAutostarter { + + public static final byte[] ROW_1 = Bytes.toBytes("row1"); + public static final byte[] ROW_2 = Bytes.toBytes("row2"); + public static final byte[] ROW_3 = Bytes.toBytes("row3"); + public static final byte[] F_1 = Bytes.toBytes("f1"); + public static final byte[] F_2 = Bytes.toBytes("f2"); + public static final byte[] Q_1 = Bytes.toBytes("q1"); + public static final byte[] Q_2 = Bytes.toBytes("q2"); + public static final byte[] Q_3 = Bytes.toBytes("q3"); + + @BeforeClass + public static void activateHBaseCluster(){ + registerHBaseMiniClusterInClasspath(); + } + + @Test + public void testHBaseTableSourceWithSingleColumnFamily() throws Exception { + // create a table with single region + MapFunction<Row, String> mapFunction = new MapFunction<Row, String>() { + + @Override + public String map(Row value) 
throws Exception { + return value == null ? "null" : value.toString(); + } + }; + TableName tableName = TableName.valueOf("test"); + // no split keys + byte[][] famNames = new byte[1][]; + famNames[0] = F_1; + createTable(tableName, famNames, null); + // get the htable instance + HTable table = openTable(tableName); + List<Put> puts = new ArrayList<Put>(); + // add some data + Put put = new Put(ROW_1); + // add 3 qualifiers per row + //1st qual is integer + put.addColumn(F_1, Q_1, Bytes.toBytes(100)); + //2nd qual is String + put.addColumn(F_1, Q_2, Bytes.toBytes("strvalue")); + // 3rd qual is long + put.addColumn(F_1, Q_3, Bytes.toBytes(19991l)); + puts.add(put); + + put = new Put(ROW_2); + // add 3 qualifiers per row + //1st qual is integer + put.addColumn(F_1, Q_1, Bytes.toBytes(101)); + //2nd qual is String + put.addColumn(F_1, Q_2, Bytes.toBytes("strvalue1")); + // 3rd qual is long + put.addColumn(F_1, Q_3, Bytes.toBytes(19992l)); + puts.add(put); + + put = new Put(ROW_3); + // add 3 qualifiers per row + //1st qual is integer + put.addColumn(F_1, Q_1, Bytes.toBytes(102)); + //2nd qual is String + put.addColumn(F_1, Q_2, Bytes.toBytes("strvalue2")); + // 3rd qual is long + put.addColumn(F_1, Q_3, Bytes.toBytes(19993l)); --- End diff -- Could you change the literal suffix to an uppercase L? A lowercase l is easily mistaken for the digit 1. > Add HBaseTableSource > -------------------- > > Key: FLINK-2168 > URL: https://issues.apache.org/jira/browse/FLINK-2168 > Project: Flink > Issue Type: New Feature > Components: Table API & SQL > Affects Versions: 0.9 > Reporter: Fabian Hueske > Assignee: ramkrishna.s.vasudevan > Priority: Minor > Labels: starter > > Add a {{HBaseTableSource}} to read data from a HBase table. The > {{HBaseTableSource}} should implement the {{ProjectableTableSource}} > (FLINK-3848) and {{FilterableTableSource}} (FLINK-3849) interfaces. > The implementation can be based on Flink's {{TableInputFormat}}. -- This message was sent by Atlassian JIRA (v6.3.4#6332)