hbase git commit: HBASE-18673 Some more unwanted references to unshaded PB classes
Repository: hbase Updated Branches: refs/heads/branch-2 81ccef83b -> 60698 HBASE-18673 Some more unwanted reference to unshaded PB classes Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6069 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6069 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6069 Branch: refs/heads/branch-2 Commit: 60698c457c35bdcc6d13557709dfc5edd608 Parents: 81ccef8 Author: Michael StackAuthored: Thu Aug 24 16:31:31 2017 -0700 Committer: Michael Stack Committed: Thu Aug 24 20:59:50 2017 -0700 -- .../apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java | 4 ++-- .../src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java | 5 ++--- 2 files changed, 4 insertions(+), 5 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/6069/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java index f445059..593a761 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java @@ -44,8 +44,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.wal.WAL.Entry; -import com.google.protobuf.CodedInputStream; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; /** * A Protobuf based WAL has the following structure: 
http://git-wip-us.apache.org/repos/asf/hbase/blob/6069/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java index 199ed7d..bc663e1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java @@ -28,7 +28,6 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Ordering; import org.apache.hadoop.hbase.shaded.com.google.common.collect.TreeMultimap; -import com.google.protobuf.ServiceException; import java.io.Closeable; import java.io.FileNotFoundException; @@ -745,7 +744,7 @@ public class HBaseFsck extends Configured implements Closeable { * @return 0 on success, non-zero on failure */ public int onlineHbck() - throws IOException, KeeperException, InterruptedException, ServiceException { + throws IOException, KeeperException, InterruptedException { // print hbase server version errors.print("Version: " + status.getHBaseVersion()); @@ -4841,7 +4840,7 @@ public class HBaseFsck extends Configured implements Closeable { public HBaseFsck exec(ExecutorService exec, String[] args) throws KeeperException, IOException, -ServiceException, InterruptedException { + InterruptedException { long sleepBeforeRerun = DEFAULT_SLEEP_BEFORE_RERUN; boolean checkCorruptHFiles = false;
hbase git commit: HBASE-18673 Some more unwanted references to unshaded PB classes
Repository: hbase Updated Branches: refs/heads/master 98bb5c05e -> 08a9522fc HBASE-18673 Some more unwanted reference to unshaded PB classes Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/08a9522f Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/08a9522f Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/08a9522f Branch: refs/heads/master Commit: 08a9522fc1610231c34d4d865777618eb06d9e19 Parents: 98bb5c0 Author: Michael StackAuthored: Thu Aug 24 16:31:31 2017 -0700 Committer: Michael Stack Committed: Thu Aug 24 20:59:23 2017 -0700 -- .../apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java | 4 ++-- .../src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java | 5 ++--- 2 files changed, 4 insertions(+), 5 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/08a9522f/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java index f445059..593a761 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java @@ -44,8 +44,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.wal.WAL.Entry; -import com.google.protobuf.CodedInputStream; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; /** * A Protobuf based WAL has the following structure: 
http://git-wip-us.apache.org/repos/asf/hbase/blob/08a9522f/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java index 199ed7d..bc663e1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java @@ -28,7 +28,6 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Ordering; import org.apache.hadoop.hbase.shaded.com.google.common.collect.TreeMultimap; -import com.google.protobuf.ServiceException; import java.io.Closeable; import java.io.FileNotFoundException; @@ -745,7 +744,7 @@ public class HBaseFsck extends Configured implements Closeable { * @return 0 on success, non-zero on failure */ public int onlineHbck() - throws IOException, KeeperException, InterruptedException, ServiceException { + throws IOException, KeeperException, InterruptedException { // print hbase server version errors.print("Version: " + status.getHBaseVersion()); @@ -4841,7 +4840,7 @@ public class HBaseFsck extends Configured implements Closeable { public HBaseFsck exec(ExecutorService exec, String[] args) throws KeeperException, IOException, -ServiceException, InterruptedException { + InterruptedException { long sleepBeforeRerun = DEFAULT_SLEEP_BEFORE_RERUN; boolean checkCorruptHFiles = false;
hbase git commit: HBASE-18448 Added refresh HFiles coprocessor endpoint
Repository: hbase Updated Branches: refs/heads/branch-2 99e84a26f -> 81ccef83b HBASE-18448 Added refresh HFiles coprocessor endpoint Signed-off-by: anoopsamjohnProject: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/81ccef83 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/81ccef83 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/81ccef83 Branch: refs/heads/branch-2 Commit: 81ccef83becbd756c05eebace8ebd7eef82b9e85 Parents: 99e84a2 Author: Ajay Jadhav Authored: Mon Aug 21 17:24:28 2017 -0700 Committer: anoopsamjohn Committed: Fri Aug 25 06:58:18 2017 +0530 -- .../client/example/RefreshHFilesClient.java | 95 ++ .../example/RefreshHFilesEndpoint.java | 86 + .../src/main/protobuf/RefreshHFiles.proto | 36 .../example/TestRefreshHFilesEndpoint.java | 177 +++ .../hadoop/hbase/HBaseTestingUtility.java | 18 ++ 5 files changed, 412 insertions(+) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/81ccef83/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java -- diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java new file mode 100644 index 000..0401959 --- /dev/null +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java @@ -0,0 +1,95 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.client.example; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.ConnectionFactory; +import org.apache.hadoop.hbase.client.Table; +import org.apache.hadoop.hbase.client.coprocessor.Batch; +import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils.BlockingRpcCallback; +import org.apache.hadoop.hbase.ipc.ServerRpcController; +import org.apache.hadoop.hbase.protobuf.generated.RefreshHFilesProtos; + +import java.io.Closeable; +import java.io.IOException; + +/** + * This client class is for invoking the refresh HFile function deployed on the + * Region Server side via the RefreshHFilesService. 
+ */ +public class RefreshHFilesClient implements Closeable { + private static final Log LOG = LogFactory.getLog(RefreshHFilesClient.class); + private final Connection connection; + + /** + * Constructor with Conf object + * + * @param cfg + */ + public RefreshHFilesClient(Configuration cfg) { +try { + this.connection = ConnectionFactory.createConnection(cfg); +} catch (IOException e) { + throw new RuntimeException(e); +} + } + + @Override + public void close() throws IOException { +if (this.connection != null && !this.connection.isClosed()) { + this.connection.close(); +} + } + + public void refreshHFiles(final TableName tableName) throws Throwable { +try (Table table = connection.getTable(tableName)) { + refreshHFiles(table); +} + } + + public void refreshHFiles(final Table table) throws Throwable { +final RefreshHFilesProtos.RefreshHFilesRequest request = RefreshHFilesProtos.RefreshHFilesRequest + .getDefaultInstance(); +table.coprocessorService(RefreshHFilesProtos.RefreshHFilesService.class, HConstants.EMPTY_START_ROW, + HConstants.EMPTY_END_ROW, + new Batch.Call () { +
hbase git commit: HBASE-18448 Added refresh HFiles coprocessor endpoint
Repository: hbase Updated Branches: refs/heads/master 1a2c38b96 -> 98bb5c05e HBASE-18448 Added refresh HFiles coprocessor endpoint Signed-off-by: anoopsamjohnProject: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/98bb5c05 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/98bb5c05 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/98bb5c05 Branch: refs/heads/master Commit: 98bb5c05e3cfcd90f2a3a25902f1eff8d8d37619 Parents: 1a2c38b Author: Ajay Jadhav Authored: Mon Aug 21 17:24:28 2017 -0700 Committer: anoopsamjohn Committed: Fri Aug 25 06:56:41 2017 +0530 -- .../client/example/RefreshHFilesClient.java | 95 ++ .../example/RefreshHFilesEndpoint.java | 86 + .../src/main/protobuf/RefreshHFiles.proto | 36 .../example/TestRefreshHFilesEndpoint.java | 177 +++ .../hadoop/hbase/HBaseTestingUtility.java | 18 ++ 5 files changed, 412 insertions(+) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/98bb5c05/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java -- diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java new file mode 100644 index 000..0401959 --- /dev/null +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java @@ -0,0 +1,95 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.client.example; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.ConnectionFactory; +import org.apache.hadoop.hbase.client.Table; +import org.apache.hadoop.hbase.client.coprocessor.Batch; +import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils.BlockingRpcCallback; +import org.apache.hadoop.hbase.ipc.ServerRpcController; +import org.apache.hadoop.hbase.protobuf.generated.RefreshHFilesProtos; + +import java.io.Closeable; +import java.io.IOException; + +/** + * This client class is for invoking the refresh HFile function deployed on the + * Region Server side via the RefreshHFilesService. 
+ */ +public class RefreshHFilesClient implements Closeable { + private static final Log LOG = LogFactory.getLog(RefreshHFilesClient.class); + private final Connection connection; + + /** + * Constructor with Conf object + * + * @param cfg + */ + public RefreshHFilesClient(Configuration cfg) { +try { + this.connection = ConnectionFactory.createConnection(cfg); +} catch (IOException e) { + throw new RuntimeException(e); +} + } + + @Override + public void close() throws IOException { +if (this.connection != null && !this.connection.isClosed()) { + this.connection.close(); +} + } + + public void refreshHFiles(final TableName tableName) throws Throwable { +try (Table table = connection.getTable(tableName)) { + refreshHFiles(table); +} + } + + public void refreshHFiles(final Table table) throws Throwable { +final RefreshHFilesProtos.RefreshHFilesRequest request = RefreshHFilesProtos.RefreshHFilesRequest + .getDefaultInstance(); +table.coprocessorService(RefreshHFilesProtos.RefreshHFilesService.class, HConstants.EMPTY_START_ROW, + HConstants.EMPTY_END_ROW, + new Batch.Call () { +
hbase git commit: HBASE-18448 Added refresh HFiles coprocessor endpoint
Repository: hbase Updated Branches: refs/heads/branch-1.4 32bc800bf -> 75ab445eb HBASE-18448 Added refresh HFiles coprocessor endpoint Signed-off-by: Michael StackProject: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/75ab445e Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/75ab445e Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/75ab445e Branch: refs/heads/branch-1.4 Commit: 75ab445eb1f99f1f44382a71b5f681aeb395c2db Parents: 32bc800 Author: Ajay Jadhav Authored: Mon Aug 21 16:31:15 2017 -0700 Committer: Michael Stack Committed: Thu Aug 24 16:38:52 2017 -0700 -- hbase-examples/pom.xml | 1 + .../client/example/RefreshHFilesClient.java | 95 ++ .../example/RefreshHFilesEndpoint.java | 86 ++ .../protobuf/generated/RefreshHFilesProtos.java | 973 +++ .../src/main/protobuf/RefreshHFiles.proto | 36 + .../example/TestRefreshHFilesEndpoint.java | 180 .../hadoop/hbase/HBaseTestingUtility.java | 18 + 7 files changed, 1389 insertions(+) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/75ab445e/hbase-examples/pom.xml -- diff --git a/hbase-examples/pom.xml b/hbase-examples/pom.xml index a3d16e0..0a8be15 100644 --- a/hbase-examples/pom.xml +++ b/hbase-examples/pom.xml @@ -305,6 +305,7 @@ if we can combine these profiles somehow --> BulkDelete.proto Examples.proto + RefreshHFiles.proto ${basedir}/src/main/java/ http://git-wip-us.apache.org/repos/asf/hbase/blob/75ab445e/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java -- diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java new file mode 100644 index 000..38f0362 --- /dev/null +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java @@ -0,0 +1,95 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * 
or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.client.example; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.ConnectionFactory; +import org.apache.hadoop.hbase.client.Table; +import org.apache.hadoop.hbase.client.coprocessor.Batch; +import org.apache.hadoop.hbase.ipc.BlockingRpcCallback; +import org.apache.hadoop.hbase.ipc.ServerRpcController; +import org.apache.hadoop.hbase.protobuf.generated.RefreshHFilesProtos; + +import java.io.Closeable; +import java.io.IOException; + +/** + * This client class is for invoking the refresh HFile function deployed on the + * Region Server side via the RefreshHFilesService. 
+ */ +public class RefreshHFilesClient implements Closeable { + private static final Log LOG = LogFactory.getLog(RefreshHFilesClient.class); + private final Connection connection; + + /** + * Constructor with Conf object + * + * @param cfg + */ + public RefreshHFilesClient(Configuration cfg) { +try { + this.connection = ConnectionFactory.createConnection(cfg); +} catch (IOException e) { + throw new RuntimeException(e); +} + } + + @Override + public void close() throws IOException { +if (this.connection != null && !this.connection.isClosed()) { + this.connection.close(); +} + } + + public void refreshHFiles(final TableName tableName) throws Throwable { +try (Table table = connection.getTable(tableName)) { +
hbase git commit: HBASE-18448 Added refresh HFiles coprocessor endpoint
Repository: hbase Updated Branches: refs/heads/branch-1 9c26a42ab -> 7d605fe9c HBASE-18448 Added refresh HFiles coprocessor endpoint Signed-off-by: Michael StackProject: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7d605fe9 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7d605fe9 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7d605fe9 Branch: refs/heads/branch-1 Commit: 7d605fe9c2ebd0e98a9e14067a6b44d117fb53eb Parents: 9c26a42 Author: Ajay Jadhav Authored: Mon Aug 21 16:31:15 2017 -0700 Committer: Michael Stack Committed: Thu Aug 24 16:05:04 2017 -0700 -- hbase-examples/pom.xml | 1 + .../client/example/RefreshHFilesClient.java | 95 ++ .../example/RefreshHFilesEndpoint.java | 86 ++ .../protobuf/generated/RefreshHFilesProtos.java | 973 +++ .../src/main/protobuf/RefreshHFiles.proto | 36 + .../example/TestRefreshHFilesEndpoint.java | 180 .../hadoop/hbase/HBaseTestingUtility.java | 18 + 7 files changed, 1389 insertions(+) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/7d605fe9/hbase-examples/pom.xml -- diff --git a/hbase-examples/pom.xml b/hbase-examples/pom.xml index a100ab2..c93c39e 100644 --- a/hbase-examples/pom.xml +++ b/hbase-examples/pom.xml @@ -305,6 +305,7 @@ if we can combine these profiles somehow --> BulkDelete.proto Examples.proto + RefreshHFiles.proto ${basedir}/src/main/java/ http://git-wip-us.apache.org/repos/asf/hbase/blob/7d605fe9/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java -- diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java new file mode 100644 index 000..38f0362 --- /dev/null +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java @@ -0,0 +1,95 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or 
more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.client.example; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.ConnectionFactory; +import org.apache.hadoop.hbase.client.Table; +import org.apache.hadoop.hbase.client.coprocessor.Batch; +import org.apache.hadoop.hbase.ipc.BlockingRpcCallback; +import org.apache.hadoop.hbase.ipc.ServerRpcController; +import org.apache.hadoop.hbase.protobuf.generated.RefreshHFilesProtos; + +import java.io.Closeable; +import java.io.IOException; + +/** + * This client class is for invoking the refresh HFile function deployed on the + * Region Server side via the RefreshHFilesService. 
+ */ +public class RefreshHFilesClient implements Closeable { + private static final Log LOG = LogFactory.getLog(RefreshHFilesClient.class); + private final Connection connection; + + /** + * Constructor with Conf object + * + * @param cfg + */ + public RefreshHFilesClient(Configuration cfg) { +try { + this.connection = ConnectionFactory.createConnection(cfg); +} catch (IOException e) { + throw new RuntimeException(e); +} + } + + @Override + public void close() throws IOException { +if (this.connection != null && !this.connection.isClosed()) { + this.connection.close(); +} + } + + public void refreshHFiles(final TableName tableName) throws Throwable { +try (Table table = connection.getTable(tableName)) { +
hbase git commit: HBASE-18287 Remove log warning in PartitionedMobCompactor.java#getFileStatus
Repository: hbase Updated Branches: refs/heads/branch-2 6252ae22c -> 99e84a26f HBASE-18287 Remove log warning in PartitionedMobCompactor.java#getFileStatus Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/99e84a26 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/99e84a26 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/99e84a26 Branch: refs/heads/branch-2 Commit: 99e84a26f54814b880d03929eba8deed303613be Parents: 6252ae2 Author: Huaxiang SunAuthored: Mon Aug 21 15:05:59 2017 -0700 Committer: Huaxiang Sun Committed: Thu Aug 24 15:03:58 2017 -0700 -- .../compactions/PartitionedMobCompactor.java| 23 +--- 1 file changed, 10 insertions(+), 13 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/99e84a26/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java index da664cd..c378a88 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java @@ -915,23 +915,20 @@ public class PartitionedMobCompactor extends MobCompactor { private FileStatus getLinkedFileStatus(HFileLink link) throws IOException { Path[] locations = link.getLocations(); +FileStatus file; for (Path location : locations) { - FileStatus file = getFileStatus(location); - if (file != null) { -return file; - } -} -return null; - } - private FileStatus getFileStatus(Path path) throws IOException { -try { - if (path != null) { -return fs.getFileStatus(path); + if (location != null) { +try { + file = fs.getFileStatus(location); + if (file != null) { +return file; + } +} catch (FileNotFoundException e) { +} } -} catch 
(FileNotFoundException e) { - LOG.warn("The file " + path + " can not be found", e); } +LOG.warn("The file " + link + " links to can not be found"); return null; } }
hbase git commit: HBASE-18287 Remove log warning in PartitionedMobCompactor.java#getFileStatus
Repository: hbase Updated Branches: refs/heads/master bd0b0afa6 -> 1a2c38b96 HBASE-18287 Remove log warning in PartitionedMobCompactor.java#getFileStatus Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1a2c38b9 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1a2c38b9 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1a2c38b9 Branch: refs/heads/master Commit: 1a2c38b9650a0b0d0da74e7db311a51b42ba859d Parents: bd0b0af Author: Huaxiang SunAuthored: Mon Aug 21 15:05:59 2017 -0700 Committer: Huaxiang Sun Committed: Thu Aug 24 14:43:44 2017 -0700 -- .../compactions/PartitionedMobCompactor.java| 23 +--- 1 file changed, 10 insertions(+), 13 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/1a2c38b9/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java index da664cd..c378a88 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java @@ -915,23 +915,20 @@ public class PartitionedMobCompactor extends MobCompactor { private FileStatus getLinkedFileStatus(HFileLink link) throws IOException { Path[] locations = link.getLocations(); +FileStatus file; for (Path location : locations) { - FileStatus file = getFileStatus(location); - if (file != null) { -return file; - } -} -return null; - } - private FileStatus getFileStatus(Path path) throws IOException { -try { - if (path != null) { -return fs.getFileStatus(path); + if (location != null) { +try { + file = fs.getFileStatus(location); + if (file != null) { +return file; + } +} catch (FileNotFoundException e) { +} } -} catch 
(FileNotFoundException e) { - LOG.warn("The file " + path + " can not be found", e); } +LOG.warn("The file " + link + " links to can not be found"); return null; } }
[1/3] hbase git commit: HBASE-18656 First issues found by error-prone
Repository: hbase Updated Branches: refs/heads/branch-1.2 863c2f7be -> 8834a9ee6 refs/heads/branch-1.3 b51cfc6ab -> 501fc3a5b refs/heads/branch-1.4 a4f7e5b6a -> 32bc800bf HBASE-18656 First issues found by error-prone Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/32bc800b Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/32bc800b Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/32bc800b Branch: refs/heads/branch-1.4 Commit: 32bc800bf512cec9076cf862de49071dc74a3187 Parents: a4f7e5b Author: Mike DrobAuthored: Wed Aug 23 16:43:50 2017 -0500 Committer: Mike Drob Committed: Thu Aug 24 14:12:00 2017 -0500 -- .../hadoop/hbase/util/ConcatenatedLists.java| 77 +--- .../apache/hadoop/hbase/TestChoreService.java | 12 +-- .../hbase/util/TestConcatenatedLists.java | 4 +- .../hadoop/hbase/util/TestDrainBarrier.java | 6 +- 4 files changed, 15 insertions(+), 84 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/32bc800b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java -- diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java index 8a3f6c5..f6fb4b9 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java @@ -18,10 +18,8 @@ */ package org.apache.hadoop.hbase.util; -import java.lang.reflect.Array; +import java.util.AbstractCollection; import java.util.ArrayList; -import java.util.Collection; -import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; @@ -34,7 +32,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; * NOTE: Doesn't implement list as it is not necessary for current usage, feel free to add. 
*/ @InterfaceAudience.Private -public class ConcatenatedLists implements Collection { +public class ConcatenatedLists extends AbstractCollection { protected final ArrayList components = new ArrayList
(); protected int size = 0; @@ -57,77 +55,6 @@ public class ConcatenatedLists implements Collection { } @Override - public boolean isEmpty() { -return this.size == 0; - } - - @Override - public boolean contains(Object o) { -for (List component : this.components) { - if (component.contains(o)) return true; -} -return false; - } - - @Override - public boolean containsAll(Collection c) { -for (Object o : c) { - if (!contains(o)) return false; -} -return true; - } - - @Override - public Object[] toArray() { -return toArray((Object[])Array.newInstance(Object.class, this.size)); - } - - @Override - @SuppressWarnings("unchecked") - public U[] toArray(U[] a) { -U[] result = (a.length == this.size()) ? a -: (U[])Array.newInstance(a.getClass().getComponentType(), this.size); -int i = 0; -for (List component : this.components) { - for (T t : component) { -result[i] = (U)t; -++i; - } -} -return result; - } - - @Override - public boolean add(T e) { -throw new UnsupportedOperationException(); - } - - @Override - public boolean remove(Object o) { -throw new UnsupportedOperationException(); - } - - @Override - public boolean addAll(Collection c) { -throw new UnsupportedOperationException(); - } - - @Override - public boolean removeAll(Collection c) { -throw new UnsupportedOperationException(); - } - - @Override - public boolean retainAll(Collection c) { -throw new UnsupportedOperationException(); - } - - @Override - public void clear() { -throw new UnsupportedOperationException(); - } - - @Override public java.util.Iterator iterator() { return new Iterator(); } http://git-wip-us.apache.org/repos/asf/hbase/blob/32bc800b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java -- diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java index cc7b91f..06ce6d0 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java +++ 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java @@ -38,6 +38,8 @@ import org.junit.experimental.categories.Category; @Category(SmallTests.class) public class TestChoreService { + public static final Log log = LogFactory.getLog(TestChoreService.class); + /** * A few
[2/3] hbase git commit: HBASE-18656 First issues found by error-prone
HBASE-18656 First issues found by error-prone Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/501fc3a5 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/501fc3a5 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/501fc3a5 Branch: refs/heads/branch-1.3 Commit: 501fc3a5bdc56b6652cd7c265e58d92b57088f98 Parents: b51cfc6 Author: Mike DrobAuthored: Wed Aug 23 16:43:50 2017 -0500 Committer: Mike Drob Committed: Thu Aug 24 14:12:11 2017 -0500 -- .../hadoop/hbase/util/ConcatenatedLists.java| 77 +--- .../apache/hadoop/hbase/TestChoreService.java | 12 +-- .../hbase/util/TestConcatenatedLists.java | 4 +- .../hadoop/hbase/util/TestDrainBarrier.java | 6 +- 4 files changed, 15 insertions(+), 84 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/501fc3a5/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java -- diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java index 8a3f6c5..f6fb4b9 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java @@ -18,10 +18,8 @@ */ package org.apache.hadoop.hbase.util; -import java.lang.reflect.Array; +import java.util.AbstractCollection; import java.util.ArrayList; -import java.util.Collection; -import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; @@ -34,7 +32,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; * NOTE: Doesn't implement list as it is not necessary for current usage, feel free to add. */ @InterfaceAudience.Private -public class ConcatenatedLists implements Collection { +public class ConcatenatedLists extends AbstractCollection { protected final ArrayList components = new ArrayList
(); protected int size = 0; @@ -57,77 +55,6 @@ public class ConcatenatedLists implements Collection { } @Override - public boolean isEmpty() { -return this.size == 0; - } - - @Override - public boolean contains(Object o) { -for (List component : this.components) { - if (component.contains(o)) return true; -} -return false; - } - - @Override - public boolean containsAll(Collection c) { -for (Object o : c) { - if (!contains(o)) return false; -} -return true; - } - - @Override - public Object[] toArray() { -return toArray((Object[])Array.newInstance(Object.class, this.size)); - } - - @Override - @SuppressWarnings("unchecked") - public U[] toArray(U[] a) { -U[] result = (a.length == this.size()) ? a -: (U[])Array.newInstance(a.getClass().getComponentType(), this.size); -int i = 0; -for (List component : this.components) { - for (T t : component) { -result[i] = (U)t; -++i; - } -} -return result; - } - - @Override - public boolean add(T e) { -throw new UnsupportedOperationException(); - } - - @Override - public boolean remove(Object o) { -throw new UnsupportedOperationException(); - } - - @Override - public boolean addAll(Collection c) { -throw new UnsupportedOperationException(); - } - - @Override - public boolean removeAll(Collection c) { -throw new UnsupportedOperationException(); - } - - @Override - public boolean retainAll(Collection c) { -throw new UnsupportedOperationException(); - } - - @Override - public void clear() { -throw new UnsupportedOperationException(); - } - - @Override public java.util.Iterator iterator() { return new Iterator(); } http://git-wip-us.apache.org/repos/asf/hbase/blob/501fc3a5/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java -- diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java index cc7b91f..06ce6d0 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java +++ 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java @@ -38,6 +38,8 @@ import org.junit.experimental.categories.Category; @Category(SmallTests.class) public class TestChoreService { + public static final Log log = LogFactory.getLog(TestChoreService.class); + /** * A few ScheduledChore samples that are useful for testing with ChoreService */ @@ -77,7 +79,7 @@ public class TestChoreService { try { Thread.sleep(getPeriod() * 2);
[3/3] hbase git commit: HBASE-18656 First issues found by error-prone
HBASE-18656 First issues found by error-prone Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8834a9ee Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8834a9ee Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8834a9ee Branch: refs/heads/branch-1.2 Commit: 8834a9ee60c9fae2754ab92cbf855541ef419f18 Parents: 863c2f7 Author: Mike DrobAuthored: Wed Aug 23 16:43:50 2017 -0500 Committer: Mike Drob Committed: Thu Aug 24 14:12:22 2017 -0500 -- .../hadoop/hbase/util/ConcatenatedLists.java| 77 +--- .../apache/hadoop/hbase/TestChoreService.java | 12 +-- .../hbase/util/TestConcatenatedLists.java | 4 +- .../hadoop/hbase/util/TestDrainBarrier.java | 6 +- 4 files changed, 15 insertions(+), 84 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/8834a9ee/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java -- diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java index 8a3f6c5..f6fb4b9 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java @@ -18,10 +18,8 @@ */ package org.apache.hadoop.hbase.util; -import java.lang.reflect.Array; +import java.util.AbstractCollection; import java.util.ArrayList; -import java.util.Collection; -import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; @@ -34,7 +32,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; * NOTE: Doesn't implement list as it is not necessary for current usage, feel free to add. */ @InterfaceAudience.Private -public class ConcatenatedLists implements Collection { +public class ConcatenatedLists extends AbstractCollection { protected final ArrayList components = new ArrayList
(); protected int size = 0; @@ -57,77 +55,6 @@ public class ConcatenatedLists implements Collection { } @Override - public boolean isEmpty() { -return this.size == 0; - } - - @Override - public boolean contains(Object o) { -for (List component : this.components) { - if (component.contains(o)) return true; -} -return false; - } - - @Override - public boolean containsAll(Collection c) { -for (Object o : c) { - if (!contains(o)) return false; -} -return true; - } - - @Override - public Object[] toArray() { -return toArray((Object[])Array.newInstance(Object.class, this.size)); - } - - @Override - @SuppressWarnings("unchecked") - public U[] toArray(U[] a) { -U[] result = (a.length == this.size()) ? a -: (U[])Array.newInstance(a.getClass().getComponentType(), this.size); -int i = 0; -for (List component : this.components) { - for (T t : component) { -result[i] = (U)t; -++i; - } -} -return result; - } - - @Override - public boolean add(T e) { -throw new UnsupportedOperationException(); - } - - @Override - public boolean remove(Object o) { -throw new UnsupportedOperationException(); - } - - @Override - public boolean addAll(Collection c) { -throw new UnsupportedOperationException(); - } - - @Override - public boolean removeAll(Collection c) { -throw new UnsupportedOperationException(); - } - - @Override - public boolean retainAll(Collection c) { -throw new UnsupportedOperationException(); - } - - @Override - public void clear() { -throw new UnsupportedOperationException(); - } - - @Override public java.util.Iterator iterator() { return new Iterator(); } http://git-wip-us.apache.org/repos/asf/hbase/blob/8834a9ee/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java -- diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java index bb81cc2..cf68601 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java +++ 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java @@ -38,6 +38,8 @@ import org.junit.experimental.categories.Category; @Category(SmallTests.class) public class TestChoreService { + public static final Log log = LogFactory.getLog(TestChoreService.class); + /** * A few ScheduledChore samples that are useful for testing with ChoreService */ @@ -77,7 +79,7 @@ public class TestChoreService { try { Thread.sleep(getPeriod() * 2);
hbase git commit: HBASE-18656 First issues found by error-prone
Repository: hbase Updated Branches: refs/heads/branch-1 016ead793 -> 9c26a42ab HBASE-18656 First issues found by error-prone Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9c26a42a Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9c26a42a Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9c26a42a Branch: refs/heads/branch-1 Commit: 9c26a42ab19ad86262d16c21d78a905c94c6a5e1 Parents: 016ead7 Author: Mike DrobAuthored: Wed Aug 23 16:43:50 2017 -0500 Committer: Mike Drob Committed: Thu Aug 24 13:04:45 2017 -0500 -- .../hadoop/hbase/util/ConcatenatedLists.java| 77 +--- .../apache/hadoop/hbase/TestChoreService.java | 12 +-- .../hbase/util/TestConcatenatedLists.java | 4 +- .../hadoop/hbase/util/TestDrainBarrier.java | 6 +- 4 files changed, 15 insertions(+), 84 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/9c26a42a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java -- diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java index 8a3f6c5..f6fb4b9 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java @@ -18,10 +18,8 @@ */ package org.apache.hadoop.hbase.util; -import java.lang.reflect.Array; +import java.util.AbstractCollection; import java.util.ArrayList; -import java.util.Collection; -import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; @@ -34,7 +32,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; * NOTE: Doesn't implement list as it is not necessary for current usage, feel free to add. 
*/ @InterfaceAudience.Private -public class ConcatenatedLists implements Collection { +public class ConcatenatedLists extends AbstractCollection { protected final ArrayList components = new ArrayList
(); protected int size = 0; @@ -57,77 +55,6 @@ public class ConcatenatedLists implements Collection { } @Override - public boolean isEmpty() { -return this.size == 0; - } - - @Override - public boolean contains(Object o) { -for (List component : this.components) { - if (component.contains(o)) return true; -} -return false; - } - - @Override - public boolean containsAll(Collection c) { -for (Object o : c) { - if (!contains(o)) return false; -} -return true; - } - - @Override - public Object[] toArray() { -return toArray((Object[])Array.newInstance(Object.class, this.size)); - } - - @Override - @SuppressWarnings("unchecked") - public U[] toArray(U[] a) { -U[] result = (a.length == this.size()) ? a -: (U[])Array.newInstance(a.getClass().getComponentType(), this.size); -int i = 0; -for (List component : this.components) { - for (T t : component) { -result[i] = (U)t; -++i; - } -} -return result; - } - - @Override - public boolean add(T e) { -throw new UnsupportedOperationException(); - } - - @Override - public boolean remove(Object o) { -throw new UnsupportedOperationException(); - } - - @Override - public boolean addAll(Collection c) { -throw new UnsupportedOperationException(); - } - - @Override - public boolean removeAll(Collection c) { -throw new UnsupportedOperationException(); - } - - @Override - public boolean retainAll(Collection c) { -throw new UnsupportedOperationException(); - } - - @Override - public void clear() { -throw new UnsupportedOperationException(); - } - - @Override public java.util.Iterator iterator() { return new Iterator(); } http://git-wip-us.apache.org/repos/asf/hbase/blob/9c26a42a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java -- diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java index cc7b91f..06ce6d0 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java +++ 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java @@ -38,6 +38,8 @@ import org.junit.experimental.categories.Category; @Category(SmallTests.class) public class TestChoreService { + public static final Log log = LogFactory.getLog(TestChoreService.class); + /** * A few ScheduledChore samples that are useful for testing with ChoreService */ @@ -77,7 +79,7 @@ public
[1/2] hbase git commit: HBASE-18656 First issues found by error-prone
Repository: hbase Updated Branches: refs/heads/branch-2 6230d2150 -> 6252ae22c refs/heads/master 1b4e935ce -> bd0b0afa6 HBASE-18656 First issues found by error-prone Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6252ae22 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6252ae22 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6252ae22 Branch: refs/heads/branch-2 Commit: 6252ae22cf6a7ae5ccbc074b48692617f9b0bcc1 Parents: 6230d21 Author: Mike DrobAuthored: Wed Aug 23 16:43:50 2017 -0500 Committer: Mike Drob Committed: Thu Aug 24 12:16:14 2017 -0500 -- .../hadoop/hbase/util/ConcatenatedLists.java| 76 +--- .../apache/hadoop/hbase/TestChoreService.java | 14 ++-- .../hbase/util/TestConcatenatedLists.java | 4 +- .../hadoop/hbase/util/TestDrainBarrier.java | 6 +- 4 files changed, 17 insertions(+), 83 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/6252ae22/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java -- diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java index ba54f9d..4217906 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java @@ -18,9 +18,8 @@ */ package org.apache.hadoop.hbase.util; -import java.lang.reflect.Array; +import java.util.AbstractCollection; import java.util.ArrayList; -import java.util.Collection; import java.util.List; import java.util.NoSuchElementException; @@ -33,7 +32,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; * NOTE: Doesn't implement list as it is not necessary for current usage, feel free to add. 
*/ @InterfaceAudience.Private -public class ConcatenatedLists implements Collection { +public class ConcatenatedLists extends AbstractCollection { protected final ArrayList components = new ArrayList<>(); protected int size = 0; @@ -56,77 +55,6 @@ public class ConcatenatedLists implements Collection { } @Override - public boolean isEmpty() { -return this.size == 0; - } - - @Override - public boolean contains(Object o) { -for (List component : this.components) { - if (component.contains(o)) return true; -} -return false; - } - - @Override - public boolean containsAll(Collection c) { -for (Object o : c) { - if (!contains(o)) return false; -} -return true; - } - - @Override - public Object[] toArray() { -return toArray((Object[])Array.newInstance(Object.class, this.size)); - } - - @Override - @SuppressWarnings("unchecked") - public U[] toArray(U[] a) { -U[] result = (a.length == this.size()) ? a -: (U[])Array.newInstance(a.getClass().getComponentType(), this.size); -int i = 0; -for (List component : this.components) { - for (T t : component) { -result[i] = (U)t; -++i; - } -} -return result; - } - - @Override - public boolean add(T e) { -throw new UnsupportedOperationException(); - } - - @Override - public boolean remove(Object o) { -throw new UnsupportedOperationException(); - } - - @Override - public boolean addAll(Collection c) { -throw new UnsupportedOperationException(); - } - - @Override - public boolean removeAll(Collection c) { -throw new UnsupportedOperationException(); - } - - @Override - public boolean retainAll(Collection c) { -throw new UnsupportedOperationException(); - } - - @Override - public void clear() { -throw new UnsupportedOperationException(); - } - - @Override public java.util.Iterator iterator() { return new Iterator(); } http://git-wip-us.apache.org/repos/asf/hbase/blob/6252ae22/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java -- diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java index f290e1d..06ce6d0 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java @@ -24,6 +24,8 @@ import static org.junit.Assert.assertTrue; import java.util.concurrent.TimeUnit; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.TestChoreService.ScheduledChoreSamples.CountingChore; import
[2/2] hbase git commit: HBASE-18656 First issues found by error-prone
HBASE-18656 First issues found by error-prone Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bd0b0afa Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bd0b0afa Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bd0b0afa Branch: refs/heads/master Commit: bd0b0afa61e9b0bc54316596dd9ae6bd8a2fe684 Parents: 1b4e935 Author: Mike DrobAuthored: Wed Aug 23 16:43:50 2017 -0500 Committer: Mike Drob Committed: Thu Aug 24 12:16:31 2017 -0500 -- .../hadoop/hbase/util/ConcatenatedLists.java| 76 +--- .../apache/hadoop/hbase/TestChoreService.java | 14 ++-- .../hbase/util/TestConcatenatedLists.java | 4 +- .../hadoop/hbase/util/TestDrainBarrier.java | 6 +- 4 files changed, 17 insertions(+), 83 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/bd0b0afa/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java -- diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java index ba54f9d..4217906 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java @@ -18,9 +18,8 @@ */ package org.apache.hadoop.hbase.util; -import java.lang.reflect.Array; +import java.util.AbstractCollection; import java.util.ArrayList; -import java.util.Collection; import java.util.List; import java.util.NoSuchElementException; @@ -33,7 +32,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; * NOTE: Doesn't implement list as it is not necessary for current usage, feel free to add. 
*/ @InterfaceAudience.Private -public class ConcatenatedLists implements Collection { +public class ConcatenatedLists extends AbstractCollection { protected final ArrayList components = new ArrayList<>(); protected int size = 0; @@ -56,77 +55,6 @@ public class ConcatenatedLists implements Collection { } @Override - public boolean isEmpty() { -return this.size == 0; - } - - @Override - public boolean contains(Object o) { -for (List component : this.components) { - if (component.contains(o)) return true; -} -return false; - } - - @Override - public boolean containsAll(Collection c) { -for (Object o : c) { - if (!contains(o)) return false; -} -return true; - } - - @Override - public Object[] toArray() { -return toArray((Object[])Array.newInstance(Object.class, this.size)); - } - - @Override - @SuppressWarnings("unchecked") - public U[] toArray(U[] a) { -U[] result = (a.length == this.size()) ? a -: (U[])Array.newInstance(a.getClass().getComponentType(), this.size); -int i = 0; -for (List component : this.components) { - for (T t : component) { -result[i] = (U)t; -++i; - } -} -return result; - } - - @Override - public boolean add(T e) { -throw new UnsupportedOperationException(); - } - - @Override - public boolean remove(Object o) { -throw new UnsupportedOperationException(); - } - - @Override - public boolean addAll(Collection c) { -throw new UnsupportedOperationException(); - } - - @Override - public boolean removeAll(Collection c) { -throw new UnsupportedOperationException(); - } - - @Override - public boolean retainAll(Collection c) { -throw new UnsupportedOperationException(); - } - - @Override - public void clear() { -throw new UnsupportedOperationException(); - } - - @Override public java.util.Iterator iterator() { return new Iterator(); } http://git-wip-us.apache.org/repos/asf/hbase/blob/bd0b0afa/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java -- diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java index f290e1d..06ce6d0 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java @@ -24,6 +24,8 @@ import static org.junit.Assert.assertTrue; import java.util.concurrent.TimeUnit; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.TestChoreService.ScheduledChoreSamples.CountingChore; import org.apache.hadoop.hbase.TestChoreService.ScheduledChoreSamples.DoNothingChore; import
hbase git commit: HBASE-18647 Parameter cacheBlocks does not take effect in REST API for scan
Repository: hbase Updated Branches: refs/heads/branch-1.4 f73b76287 -> a4f7e5b6a HBASE-18647 Parameter cacheBlocks does not take effect in REST API for scan Signed-off-by: tedyuProject: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a4f7e5b6 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a4f7e5b6 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a4f7e5b6 Branch: refs/heads/branch-1.4 Commit: a4f7e5b6ae8992a69dd1c7405831e4dfefdfe589 Parents: f73b762 Author: Guangxu Cheng Authored: Fri Aug 25 00:36:09 2017 +0800 Committer: tedyu Committed: Thu Aug 24 09:44:53 2017 -0700 -- .../org/apache/hadoop/hbase/rest/Constants.java | 1 + .../apache/hadoop/hbase/rest/TableResource.java | 5 ++--- .../hadoop/hbase/rest/TableScanResource.java | 19 --- 3 files changed, 11 insertions(+), 14 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/a4f7e5b6/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java -- diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java index 54ce117..9e1a224 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java @@ -70,6 +70,7 @@ public interface Constants { String SCAN_FETCH_SIZE = "hbase.rest.scan.fetchsize"; String SCAN_FILTER = "filter"; String SCAN_REVERSED = "reversed"; + String SCAN_CACHE_BLOCKS = "cacheblocks"; String CUSTOM_FILTERS = "hbase.rest.custom.filters"; String ROW_KEYS_PARAM_NAME = "row"; http://git-wip-us.apache.org/repos/asf/hbase/blob/a4f7e5b6/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java -- diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java index 126328f..2127dad 100644 --- 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java @@ -122,9 +122,7 @@ public class TableResource extends ResourceBase { @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="REC_CATCH_EXCEPTION") @Path("{scanspec: .*[*]$}") public TableScanResource getScanResource( - final @Context UriInfo uriInfo, final @PathParam("scanspec") String scanSpec, - final @HeaderParam("Accept") String contentType, @DefaultValue(Integer.MAX_VALUE + "") @QueryParam(Constants.SCAN_LIMIT) int userRequestedLimit, @DefaultValue("") @QueryParam(Constants.SCAN_START_ROW) String startRow, @@ -134,7 +132,7 @@ public class TableResource extends ResourceBase { @DefaultValue("-1") @QueryParam(Constants.SCAN_BATCH_SIZE) int batchSize, @DefaultValue("0") @QueryParam(Constants.SCAN_START_TIME) long startTime, @DefaultValue(Long.MAX_VALUE + "") @QueryParam(Constants.SCAN_END_TIME) long endTime, - @DefaultValue("true") @QueryParam(Constants.SCAN_BATCH_SIZE) boolean cacheBlocks, + @DefaultValue("true") @QueryParam(Constants.SCAN_CACHE_BLOCKS) boolean cacheBlocks, @DefaultValue("false") @QueryParam(Constants.SCAN_REVERSED) boolean reversed, @DefaultValue("") @QueryParam(Constants.SCAN_FILTER) String paramFilter) { try { @@ -204,6 +202,7 @@ public class TableResource extends ResourceBase { int fetchSize = this.servlet.getConfiguration().getInt(Constants.SCAN_FETCH_SIZE, 10); tableScan.setCaching(fetchSize); tableScan.setReversed(reversed); + tableScan.setCacheBlocks(cacheBlocks); return new TableScanResource(hTable.getScanner(tableScan), userRequestedLimit); } catch (Exception exp) { servlet.getMetrics().incrementFailedScanRequests(1); http://git-wip-us.apache.org/repos/asf/hbase/blob/a4f7e5b6/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java -- diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java index 5cc2c7b..3effc01 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java @@ -67,6 +67,9 @@ public class TableScanResource extends ResourceBase { @GET @Produces({ Constants.MIMETYPE_XML, Constants.MIMETYPE_JSON }) public
hbase git commit: HBASE-18647 Parameter cacheBlocks does not take effect in REST API for scan
Repository: hbase Updated Branches: refs/heads/branch-1 5015913ce -> 016ead793 HBASE-18647 Parameter cacheBlocks does not take effect in REST API for scan Signed-off-by: tedyuProject: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/016ead79 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/016ead79 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/016ead79 Branch: refs/heads/branch-1 Commit: 016ead793631d148c09d1df24807618661b9f884 Parents: 5015913 Author: Guangxu Cheng Authored: Fri Aug 25 00:36:09 2017 +0800 Committer: tedyu Committed: Thu Aug 24 09:44:22 2017 -0700 -- .../org/apache/hadoop/hbase/rest/Constants.java | 1 + .../apache/hadoop/hbase/rest/TableResource.java | 5 ++--- .../hadoop/hbase/rest/TableScanResource.java | 19 --- 3 files changed, 11 insertions(+), 14 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/016ead79/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java -- diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java index 54ce117..9e1a224 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java @@ -70,6 +70,7 @@ public interface Constants { String SCAN_FETCH_SIZE = "hbase.rest.scan.fetchsize"; String SCAN_FILTER = "filter"; String SCAN_REVERSED = "reversed"; + String SCAN_CACHE_BLOCKS = "cacheblocks"; String CUSTOM_FILTERS = "hbase.rest.custom.filters"; String ROW_KEYS_PARAM_NAME = "row"; http://git-wip-us.apache.org/repos/asf/hbase/blob/016ead79/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java -- diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java index 126328f..2127dad 100644 --- 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java @@ -122,9 +122,7 @@ public class TableResource extends ResourceBase { @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="REC_CATCH_EXCEPTION") @Path("{scanspec: .*[*]$}") public TableScanResource getScanResource( - final @Context UriInfo uriInfo, final @PathParam("scanspec") String scanSpec, - final @HeaderParam("Accept") String contentType, @DefaultValue(Integer.MAX_VALUE + "") @QueryParam(Constants.SCAN_LIMIT) int userRequestedLimit, @DefaultValue("") @QueryParam(Constants.SCAN_START_ROW) String startRow, @@ -134,7 +132,7 @@ public class TableResource extends ResourceBase { @DefaultValue("-1") @QueryParam(Constants.SCAN_BATCH_SIZE) int batchSize, @DefaultValue("0") @QueryParam(Constants.SCAN_START_TIME) long startTime, @DefaultValue(Long.MAX_VALUE + "") @QueryParam(Constants.SCAN_END_TIME) long endTime, - @DefaultValue("true") @QueryParam(Constants.SCAN_BATCH_SIZE) boolean cacheBlocks, + @DefaultValue("true") @QueryParam(Constants.SCAN_CACHE_BLOCKS) boolean cacheBlocks, @DefaultValue("false") @QueryParam(Constants.SCAN_REVERSED) boolean reversed, @DefaultValue("") @QueryParam(Constants.SCAN_FILTER) String paramFilter) { try { @@ -204,6 +202,7 @@ public class TableResource extends ResourceBase { int fetchSize = this.servlet.getConfiguration().getInt(Constants.SCAN_FETCH_SIZE, 10); tableScan.setCaching(fetchSize); tableScan.setReversed(reversed); + tableScan.setCacheBlocks(cacheBlocks); return new TableScanResource(hTable.getScanner(tableScan), userRequestedLimit); } catch (Exception exp) { servlet.getMetrics().incrementFailedScanRequests(1); http://git-wip-us.apache.org/repos/asf/hbase/blob/016ead79/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java -- diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java index 5cc2c7b..3effc01 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java @@ -67,6 +67,9 @@ public class TableScanResource extends ResourceBase { @GET @Produces({ Constants.MIMETYPE_XML, Constants.MIMETYPE_JSON }) public
hbase git commit: HBASE-18658 Purge hokey hbase Service implementation; use (internal) Guava Service instead
Repository: hbase Updated Branches: refs/heads/branch-2 adbe844ea -> 6230d2150 HBASE-18658 Purge hokey hbase Service implementation; use (internal) Guava Service instead Removes hbase Service. Moves the single user, ClusterSchemaServiceImpl to use relocated internal Guava Service instead. Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6230d215 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6230d215 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6230d215 Branch: refs/heads/branch-2 Commit: 6230d215061f87f1bb4def33670c39986158a3a2 Parents: adbe844 Author: Michael StackAuthored: Tue Aug 22 15:27:17 2017 -0700 Committer: Michael Stack Committed: Thu Aug 24 09:25:42 2017 -0700 -- .../java/org/apache/hadoop/hbase/Service.java | 50 .../hbase/master/ClusterSchemaService.java | 2 +- .../hbase/master/ClusterSchemaServiceImpl.java | 27 +-- .../org/apache/hadoop/hbase/master/HMaster.java | 22 - 4 files changed, 33 insertions(+), 68 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/6230d215/hbase-common/src/main/java/org/apache/hadoop/hbase/Service.java -- diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/Service.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/Service.java deleted file mode 100644 index 97d93cc..000 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/Service.java +++ /dev/null @@ -1,50 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hbase; - -import java.io.IOException; - -import org.apache.hadoop.hbase.classification.InterfaceAudience; - -/** - * Simple Service. - */ -// This is a WIP. We have Services throughout hbase. Either have all implement what is here or -// just remove this as an experiment that did not work out. -// TODO: Move on to guava Service after we update our guava version; later guava has nicer -// Service implmentation. -// TODO: Move all Services on to this one Interface. -@InterfaceAudience.Private -public interface Service { - /** - * Initiates service startup (if necessary), returning once the service has finished starting. - * @throws IOException Throws exception if already running and if we fail to start successfully. - */ - void startAndWait() throws IOException; - - /** - * @return True if this Service is running. - */ - boolean isRunning(); - - /** - * Initiates service shutdown (if necessary), returning once the service has finished stopping. - * @throws IOException Throws exception if not running of if we fail to stop successfully. 
- */ - void stopAndWait() throws IOException; -} http://git-wip-us.apache.org/repos/asf/hbase/blob/6230d215/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterSchemaService.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterSchemaService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterSchemaService.java index 43353ba..9cfbe8c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterSchemaService.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterSchemaService.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.master; -import org.apache.hadoop.hbase.Service; import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Service; /** * Mixes in ClusterSchema and Service http://git-wip-us.apache.org/repos/asf/hbase/blob/6230d215/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterSchemaServiceImpl.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterSchemaServiceImpl.java
hbase git commit: HBASE-18658 Purge hokey hbase Service implementation; use (internal) Guava Service instead
Repository: hbase Updated Branches: refs/heads/master 88356029f -> 1b4e935ce HBASE-18658 Purge hokey hbase Service implementation; use (internal) Guava Service instead Removes hbase Service. Moves the single user, ClusterSchemaServiceImpl to use relocated internal Guava Service instead. Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1b4e935c Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1b4e935c Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1b4e935c Branch: refs/heads/master Commit: 1b4e935cec47cf03c18e11b789177a6484661abd Parents: 8835602 Author: Michael StackAuthored: Tue Aug 22 15:27:17 2017 -0700 Committer: Michael Stack Committed: Thu Aug 24 09:19:36 2017 -0700 -- .../java/org/apache/hadoop/hbase/Service.java | 50 .../hbase/master/ClusterSchemaService.java | 2 +- .../hbase/master/ClusterSchemaServiceImpl.java | 27 +-- .../org/apache/hadoop/hbase/master/HMaster.java | 22 - 4 files changed, 33 insertions(+), 68 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/1b4e935c/hbase-common/src/main/java/org/apache/hadoop/hbase/Service.java -- diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/Service.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/Service.java deleted file mode 100644 index 97d93cc..000 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/Service.java +++ /dev/null @@ -1,50 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hbase; - -import java.io.IOException; - -import org.apache.hadoop.hbase.classification.InterfaceAudience; - -/** - * Simple Service. - */ -// This is a WIP. We have Services throughout hbase. Either have all implement what is here or -// just remove this as an experiment that did not work out. -// TODO: Move on to guava Service after we update our guava version; later guava has nicer -// Service implmentation. -// TODO: Move all Services on to this one Interface. -@InterfaceAudience.Private -public interface Service { - /** - * Initiates service startup (if necessary), returning once the service has finished starting. - * @throws IOException Throws exception if already running and if we fail to start successfully. - */ - void startAndWait() throws IOException; - - /** - * @return True if this Service is running. - */ - boolean isRunning(); - - /** - * Initiates service shutdown (if necessary), returning once the service has finished stopping. - * @throws IOException Throws exception if not running of if we fail to stop successfully. 
- */ - void stopAndWait() throws IOException; -} http://git-wip-us.apache.org/repos/asf/hbase/blob/1b4e935c/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterSchemaService.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterSchemaService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterSchemaService.java index 43353ba..9cfbe8c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterSchemaService.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterSchemaService.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.master; -import org.apache.hadoop.hbase.Service; import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Service; /** * Mixes in ClusterSchema and Service http://git-wip-us.apache.org/repos/asf/hbase/blob/1b4e935c/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterSchemaServiceImpl.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterSchemaServiceImpl.java
hbase git commit: Revert "HBASE-18448 Added refresh HFiles coprocessor endpoint"
Repository: hbase Updated Branches: refs/heads/master 612c23556 -> 88356029f Revert "HBASE-18448 Added refresh HFiles coprocessor endpoint" This reverts commit 612c23556d4d2b6ef609ce7c487efa9bed35e145. Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/88356029 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/88356029 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/88356029 Branch: refs/heads/master Commit: 88356029f17430b77df15aaedbe17651397494a7 Parents: 612c235 Author: anoopsamjohnAuthored: Thu Aug 24 21:13:08 2017 +0530 Committer: anoopsamjohn Committed: Thu Aug 24 21:13:08 2017 +0530 -- .../client/example/RefreshHFilesClient.java | 95 -- .../example/RefreshHFilesEndpoint.java | 86 - .../src/main/protobuf/RefreshHFiles.proto | 36 .../example/TestRefreshHFilesEndpoint.java | 177 --- .../hadoop/hbase/HBaseTestingUtility.java | 18 -- 5 files changed, 412 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/88356029/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java -- diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java deleted file mode 100644 index 0401959..000 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hbase.client.example; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.client.Connection; -import org.apache.hadoop.hbase.client.ConnectionFactory; -import org.apache.hadoop.hbase.client.Table; -import org.apache.hadoop.hbase.client.coprocessor.Batch; -import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils.BlockingRpcCallback; -import org.apache.hadoop.hbase.ipc.ServerRpcController; -import org.apache.hadoop.hbase.protobuf.generated.RefreshHFilesProtos; - -import java.io.Closeable; -import java.io.IOException; - -/** - * This client class is for invoking the refresh HFile function deployed on the - * Region Server side via the RefreshHFilesService. 
- */ -public class RefreshHFilesClient implements Closeable { - private static final Log LOG = LogFactory.getLog(RefreshHFilesClient.class); - private final Connection connection; - - /** - * Constructor with Conf object - * - * @param cfg - */ - public RefreshHFilesClient(Configuration cfg) { -try { - this.connection = ConnectionFactory.createConnection(cfg); -} catch (IOException e) { - throw new RuntimeException(e); -} - } - - @Override - public void close() throws IOException { -if (this.connection != null && !this.connection.isClosed()) { - this.connection.close(); -} - } - - public void refreshHFiles(final TableName tableName) throws Throwable { -try (Table table = connection.getTable(tableName)) { - refreshHFiles(table); -} - } - - public void refreshHFiles(final Table table) throws Throwable { -final RefreshHFilesProtos.RefreshHFilesRequest request = RefreshHFilesProtos.RefreshHFilesRequest - .getDefaultInstance(); -table.coprocessorService(RefreshHFilesProtos.RefreshHFilesService.class, HConstants.EMPTY_START_ROW, - HConstants.EMPTY_END_ROW, - new Batch.Call () { -
hbase git commit: Revert "HBASE-18448 Added refresh HFiles coprocessor endpoint"
Repository: hbase Updated Branches: refs/heads/branch-2 e5a8f162a -> adbe844ea Revert "HBASE-18448 Added refresh HFiles coprocessor endpoint" This reverts commit e5a8f162a2ded86b42f40650d879fb730afb84dc. Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/adbe844e Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/adbe844e Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/adbe844e Branch: refs/heads/branch-2 Commit: adbe844ea5c0bbb9d11ccab222e25cfbfdae3208 Parents: e5a8f16 Author: anoopsamjohnAuthored: Thu Aug 24 21:12:12 2017 +0530 Committer: anoopsamjohn Committed: Thu Aug 24 21:12:12 2017 +0530 -- .../client/example/RefreshHFilesClient.java | 95 -- .../example/RefreshHFilesEndpoint.java | 86 - .../src/main/protobuf/RefreshHFiles.proto | 36 .../example/TestRefreshHFilesEndpoint.java | 177 --- .../hadoop/hbase/HBaseTestingUtility.java | 18 -- 5 files changed, 412 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/adbe844e/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java -- diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java deleted file mode 100644 index 0401959..000 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hbase.client.example; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.client.Connection; -import org.apache.hadoop.hbase.client.ConnectionFactory; -import org.apache.hadoop.hbase.client.Table; -import org.apache.hadoop.hbase.client.coprocessor.Batch; -import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils.BlockingRpcCallback; -import org.apache.hadoop.hbase.ipc.ServerRpcController; -import org.apache.hadoop.hbase.protobuf.generated.RefreshHFilesProtos; - -import java.io.Closeable; -import java.io.IOException; - -/** - * This client class is for invoking the refresh HFile function deployed on the - * Region Server side via the RefreshHFilesService. 
- */ -public class RefreshHFilesClient implements Closeable { - private static final Log LOG = LogFactory.getLog(RefreshHFilesClient.class); - private final Connection connection; - - /** - * Constructor with Conf object - * - * @param cfg - */ - public RefreshHFilesClient(Configuration cfg) { -try { - this.connection = ConnectionFactory.createConnection(cfg); -} catch (IOException e) { - throw new RuntimeException(e); -} - } - - @Override - public void close() throws IOException { -if (this.connection != null && !this.connection.isClosed()) { - this.connection.close(); -} - } - - public void refreshHFiles(final TableName tableName) throws Throwable { -try (Table table = connection.getTable(tableName)) { - refreshHFiles(table); -} - } - - public void refreshHFiles(final Table table) throws Throwable { -final RefreshHFilesProtos.RefreshHFilesRequest request = RefreshHFilesProtos.RefreshHFilesRequest - .getDefaultInstance(); -table.coprocessorService(RefreshHFilesProtos.RefreshHFilesService.class, HConstants.EMPTY_START_ROW, - HConstants.EMPTY_END_ROW, - new Batch.Call () { -
hbase git commit: HBASE-18448 Added refresh HFiles coprocessor endpoint
Repository: hbase Updated Branches: refs/heads/branch-2 08212e50f -> e5a8f162a HBASE-18448 Added refresh HFiles coprocessor endpoint Signed-off-by: anoopsamjohnProject: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e5a8f162 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e5a8f162 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e5a8f162 Branch: refs/heads/branch-2 Commit: e5a8f162a2ded86b42f40650d879fb730afb84dc Parents: 08212e5 Author: Ajay Jadhav Authored: Mon Aug 21 17:24:28 2017 -0700 Committer: anoopsamjohn Committed: Thu Aug 24 20:44:45 2017 +0530 -- .../client/example/RefreshHFilesClient.java | 95 ++ .../example/RefreshHFilesEndpoint.java | 86 + .../src/main/protobuf/RefreshHFiles.proto | 36 .../example/TestRefreshHFilesEndpoint.java | 177 +++ .../hadoop/hbase/HBaseTestingUtility.java | 18 ++ 5 files changed, 412 insertions(+) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/e5a8f162/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java -- diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java new file mode 100644 index 000..0401959 --- /dev/null +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java @@ -0,0 +1,95 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.client.example; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.ConnectionFactory; +import org.apache.hadoop.hbase.client.Table; +import org.apache.hadoop.hbase.client.coprocessor.Batch; +import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils.BlockingRpcCallback; +import org.apache.hadoop.hbase.ipc.ServerRpcController; +import org.apache.hadoop.hbase.protobuf.generated.RefreshHFilesProtos; + +import java.io.Closeable; +import java.io.IOException; + +/** + * This client class is for invoking the refresh HFile function deployed on the + * Region Server side via the RefreshHFilesService. 
+ */ +public class RefreshHFilesClient implements Closeable { + private static final Log LOG = LogFactory.getLog(RefreshHFilesClient.class); + private final Connection connection; + + /** + * Constructor with Conf object + * + * @param cfg + */ + public RefreshHFilesClient(Configuration cfg) { +try { + this.connection = ConnectionFactory.createConnection(cfg); +} catch (IOException e) { + throw new RuntimeException(e); +} + } + + @Override + public void close() throws IOException { +if (this.connection != null && !this.connection.isClosed()) { + this.connection.close(); +} + } + + public void refreshHFiles(final TableName tableName) throws Throwable { +try (Table table = connection.getTable(tableName)) { + refreshHFiles(table); +} + } + + public void refreshHFiles(final Table table) throws Throwable { +final RefreshHFilesProtos.RefreshHFilesRequest request = RefreshHFilesProtos.RefreshHFilesRequest + .getDefaultInstance(); +table.coprocessorService(RefreshHFilesProtos.RefreshHFilesService.class, HConstants.EMPTY_START_ROW, + HConstants.EMPTY_END_ROW, + new Batch.Call () { +
hbase git commit: HBASE-18448 Added refresh HFiles coprocessor endpoint
Repository: hbase Updated Branches: refs/heads/master 321bc55f9 -> 612c23556 HBASE-18448 Added refresh HFiles coprocessor endpoint Signed-off-by: anoopsamjohnProject: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/612c2355 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/612c2355 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/612c2355 Branch: refs/heads/master Commit: 612c23556d4d2b6ef609ce7c487efa9bed35e145 Parents: 321bc55 Author: Ajay Jadhav Authored: Mon Aug 21 17:24:28 2017 -0700 Committer: anoopsamjohn Committed: Thu Aug 24 20:42:49 2017 +0530 -- .../client/example/RefreshHFilesClient.java | 95 ++ .../example/RefreshHFilesEndpoint.java | 86 + .../src/main/protobuf/RefreshHFiles.proto | 36 .../example/TestRefreshHFilesEndpoint.java | 177 +++ .../hadoop/hbase/HBaseTestingUtility.java | 18 ++ 5 files changed, 412 insertions(+) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/612c2355/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java -- diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java new file mode 100644 index 000..0401959 --- /dev/null +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java @@ -0,0 +1,95 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.client.example; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.ConnectionFactory; +import org.apache.hadoop.hbase.client.Table; +import org.apache.hadoop.hbase.client.coprocessor.Batch; +import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils.BlockingRpcCallback; +import org.apache.hadoop.hbase.ipc.ServerRpcController; +import org.apache.hadoop.hbase.protobuf.generated.RefreshHFilesProtos; + +import java.io.Closeable; +import java.io.IOException; + +/** + * This client class is for invoking the refresh HFile function deployed on the + * Region Server side via the RefreshHFilesService. 
+ */ +public class RefreshHFilesClient implements Closeable { + private static final Log LOG = LogFactory.getLog(RefreshHFilesClient.class); + private final Connection connection; + + /** + * Constructor with Conf object + * + * @param cfg + */ + public RefreshHFilesClient(Configuration cfg) { +try { + this.connection = ConnectionFactory.createConnection(cfg); +} catch (IOException e) { + throw new RuntimeException(e); +} + } + + @Override + public void close() throws IOException { +if (this.connection != null && !this.connection.isClosed()) { + this.connection.close(); +} + } + + public void refreshHFiles(final TableName tableName) throws Throwable { +try (Table table = connection.getTable(tableName)) { + refreshHFiles(table); +} + } + + public void refreshHFiles(final Table table) throws Throwable { +final RefreshHFilesProtos.RefreshHFilesRequest request = RefreshHFilesProtos.RefreshHFilesRequest + .getDefaultInstance(); +table.coprocessorService(RefreshHFilesProtos.RefreshHFilesService.class, HConstants.EMPTY_START_ROW, + HConstants.EMPTY_END_ROW, + new Batch.Call () { +
[39/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html b/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html index 3ac42fc..5041272 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html @@ -34,564 +34,466 @@ 026import java.util.HashSet; 027import java.util.List; 028import java.util.Map; -029import java.util.Map.Entry; -030import java.util.Set; -031import java.util.TreeMap; -032 -033import org.apache.commons.logging.Log; -034import org.apache.commons.logging.LogFactory; -035import org.apache.hadoop.conf.Configuration; -036import org.apache.hadoop.hbase.Abortable; -037import org.apache.hadoop.hbase.HColumnDescriptor; -038import org.apache.hadoop.hbase.HConstants; -039import org.apache.hadoop.hbase.HTableDescriptor; -040import org.apache.hadoop.hbase.ReplicationPeerNotFoundException; -041import org.apache.hadoop.hbase.TableName; -042import org.apache.hadoop.hbase.TableNotFoundException; -043import org.apache.hadoop.hbase.classification.InterfaceAudience; -044import org.apache.hadoop.hbase.client.Admin; -045import org.apache.hadoop.hbase.client.Connection; -046import org.apache.hadoop.hbase.client.ConnectionFactory; -047import org.apache.hadoop.hbase.client.HBaseAdmin; -048import org.apache.hadoop.hbase.client.RegionLocator; -049import org.apache.hadoop.hbase.replication.ReplicationException; -050import org.apache.hadoop.hbase.replication.ReplicationFactory; -051import org.apache.hadoop.hbase.replication.ReplicationPeer; -052import org.apache.hadoop.hbase.replication.ReplicationPeerConfig; -053import org.apache.hadoop.hbase.replication.ReplicationPeerDescription; -054import 
org.apache.hadoop.hbase.replication.ReplicationPeerZKImpl; -055import org.apache.hadoop.hbase.replication.ReplicationPeers; -056import org.apache.hadoop.hbase.replication.ReplicationQueuesClient; -057import org.apache.hadoop.hbase.replication.ReplicationQueuesClientArguments; -058import org.apache.hadoop.hbase.util.Pair; -059import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; -060 -061import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; -062import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; -063 -064/** +029import java.util.Set; +030import java.util.TreeMap; +031 +032import org.apache.commons.logging.Log; +033import org.apache.commons.logging.LogFactory; +034import org.apache.hadoop.conf.Configuration; +035import org.apache.hadoop.hbase.HConstants; +036import org.apache.hadoop.hbase.ReplicationPeerNotFoundException; +037import org.apache.hadoop.hbase.TableName; +038import org.apache.hadoop.hbase.classification.InterfaceAudience; +039import org.apache.hadoop.hbase.client.Admin; +040import org.apache.hadoop.hbase.client.Connection; +041import org.apache.hadoop.hbase.client.ConnectionFactory; +042import org.apache.hadoop.hbase.replication.ReplicationException; +043import org.apache.hadoop.hbase.replication.ReplicationPeerConfig; +044import org.apache.hadoop.hbase.replication.ReplicationPeerDescription; +045 +046import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +047import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +048 +049/** +050 * p +051 * This class provides the administrative interface to HBase cluster +052 * replication. +053 * /p +054 * p +055 * Adding a new peer results in creating new outbound connections from every +056 * region server to a subset of region servers on the slave cluster. 
Each +057 * new stream of replication will start replicating from the beginning of the +058 * current WAL, meaning that edits from that past will be replicated. +059 * /p +060 * p +061 * Removing a peer is a destructive and irreversible operation that stops +062 * all the replication streams for the given cluster and deletes the metadata +063 * used to keep track of the replication state. +064 * /p 065 * p -066 * This class provides the administrative interface to HBase cluster -067 * replication. +066 * To see which commands are available in the shell, type +067 * codereplication/code. 068 * /p -069 * p -070 * Adding a new peer results in creating new outbound connections from every -071 * region server to a subset of region servers on the slave cluster. Each -072 * new stream of replication will start replicating from the beginning of the -073 * current WAL, meaning that edits from that past will be replicated. -074 * /p -075 * p -076 * Removing a peer is a destructive and irreversible operation that stops -077 * all the replication streams for the given
[41/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html b/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html index 8681390..e64f371 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html @@ -46,1440 +46,1442 @@ 038import org.apache.hadoop.fs.Path; 039import org.apache.hadoop.hbase.Coprocessor; 040import org.apache.hadoop.hbase.HConstants; -041import org.apache.hadoop.hbase.TableName; -042import org.apache.hadoop.hbase.classification.InterfaceAudience; -043import org.apache.hadoop.hbase.exceptions.DeserializationException; -044import org.apache.hadoop.hbase.security.User; -045import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; -046import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; -047import org.apache.hadoop.hbase.util.Bytes; -048 -049@InterfaceAudience.Public -050public class TableDescriptorBuilder { -051 public static final Log LOG = LogFactory.getLog(TableDescriptorBuilder.class); -052 @InterfaceAudience.Private -053 public static final String SPLIT_POLICY = "SPLIT_POLICY"; -054 private static final Bytes SPLIT_POLICY_KEY = new Bytes(Bytes.toBytes(SPLIT_POLICY)); -055 /** -056 * Used by HBase Shell interface to access this metadata -057 * attribute which denotes the maximum size of the store file after which a -058 * region split occurs. 
-059 */ -060 @InterfaceAudience.Private -061 public static final String MAX_FILESIZE = "MAX_FILESIZE"; -062 private static final Bytes MAX_FILESIZE_KEY -063 = new Bytes(Bytes.toBytes(MAX_FILESIZE)); -064 -065 @InterfaceAudience.Private -066 public static final String OWNER = "OWNER"; -067 @InterfaceAudience.Private -068 public static final Bytes OWNER_KEY -069 = new Bytes(Bytes.toBytes(OWNER)); -070 -071 /** -072 * Used by rest interface to access this metadata attribute -073 * which denotes if the table is Read Only. -074 */ -075 @InterfaceAudience.Private -076 public static final String READONLY = "READONLY"; -077 private static final Bytes READONLY_KEY -078 = new Bytes(Bytes.toBytes(READONLY)); -079 -080 /** -081 * Used by HBase Shell interface to access this metadata -082 * attribute which denotes if the table is compaction enabled. -083 */ -084 @InterfaceAudience.Private -085 public static final String COMPACTION_ENABLED = "COMPACTION_ENABLED"; -086 private static final Bytes COMPACTION_ENABLED_KEY -087 = new Bytes(Bytes.toBytes(COMPACTION_ENABLED)); -088 -089 /** -090 * Used by HBase Shell interface to access this metadata -091 * attribute which represents the maximum size of the memstore after which its -092 * contents are flushed onto the disk. -093 */ -094 @InterfaceAudience.Private -095 public static final String MEMSTORE_FLUSHSIZE = "MEMSTORE_FLUSHSIZE"; -096 private static final Bytes MEMSTORE_FLUSHSIZE_KEY -097 = new Bytes(Bytes.toBytes(MEMSTORE_FLUSHSIZE)); -098 -099 @InterfaceAudience.Private -100 public static final String FLUSH_POLICY = "FLUSH_POLICY"; -101 private static final Bytes FLUSH_POLICY_KEY = new Bytes(Bytes.toBytes(FLUSH_POLICY)); -102 /** -103 * Used by rest interface to access this metadata attribute -104 * which denotes if it is a catalog table, either code hbase:meta /code. 
-105 */ -106 @InterfaceAudience.Private -107 public static final String IS_META = "IS_META"; -108 private static final Bytes IS_META_KEY -109 = new Bytes(Bytes.toBytes(IS_META)); -110 -111 /** -112 * {@link Durability} setting for the table. -113 */ -114 @InterfaceAudience.Private -115 public static final String DURABILITY = "DURABILITY"; -116 private static final Bytes DURABILITY_KEY -117 = new Bytes(Bytes.toBytes("DURABILITY")); -118 -119 /** -120 * The number of region replicas for the table. -121 */ -122 @InterfaceAudience.Private -123 public static final String REGION_REPLICATION = "REGION_REPLICATION"; -124 private static final Bytes REGION_REPLICATION_KEY -125 = new Bytes(Bytes.toBytes(REGION_REPLICATION)); -126 -127 /** -128 * The flag to indicate whether or not the memstore should be -129 * replicated for read-replicas (CONSISTENCY =gt; TIMELINE). -130 */ -131 @InterfaceAudience.Private -132 public static final String REGION_MEMSTORE_REPLICATION = "REGION_MEMSTORE_REPLICATION"; -133 private static final Bytes REGION_MEMSTORE_REPLICATION_KEY -134 = new Bytes(Bytes.toBytes(REGION_MEMSTORE_REPLICATION)); -135 -136 /** -137 * Used by shell/rest interface to access this metadata -138 * attribute which denotes if the table
[50/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/apidocs/index-all.html -- diff --git a/apidocs/index-all.html b/apidocs/index-all.html index 5d0d4dc..c2452c8 100644 --- a/apidocs/index-all.html +++ b/apidocs/index-all.html @@ -568,6 +568,14 @@ For creating AsyncAdmin. +AsyncBufferedMutator - Interface in org.apache.hadoop.hbase.client + +Used to communicate with a single HBase table in batches. + +AsyncBufferedMutatorBuilder - Interface in org.apache.hadoop.hbase.client + +For creating AsyncBufferedMutator. + AsyncConnection - Interface in org.apache.hadoop.hbase.client The asynchronous version of Connection. @@ -837,6 +845,10 @@ Create a AsyncAdmin instance. +build() - Method in interface org.apache.hadoop.hbase.client.AsyncBufferedMutatorBuilder + +Create the AsyncBufferedMutator instance. + build() - Method in interface org.apache.hadoop.hbase.client.AsyncTableBuilder Create the AsyncTable or RawAsyncTable instance. @@ -1349,6 +1361,10 @@ close() - Method in interface org.apache.hadoop.hbase.client.Admin +close() - Method in interface org.apache.hadoop.hbase.client.AsyncBufferedMutator + +Performs a AsyncBufferedMutator.flush() and releases any resources held. + close() - Method in interface org.apache.hadoop.hbase.client.BufferedMutator Performs a BufferedMutator.flush() and releases any resources held. @@ -2081,6 +2097,8 @@ copy(TableDescriptor) - Static method in class org.apache.hadoop.hbase.client.TableDescriptorBuilder +copy(TableName, TableDescriptor) - Static method in class org.apache.hadoop.hbase.client.TableDescriptorBuilder + copy(byte[]) - Static method in class org.apache.hadoop.hbase.util.Bytes Copy the byte array given in parameter and return an instance @@ -5118,6 +5136,10 @@ Flush a table. +flush() - Method in interface org.apache.hadoop.hbase.client.AsyncBufferedMutator + +Executes all the buffered, asynchronous operations. 
+ flush() - Method in interface org.apache.hadoop.hbase.client.BufferedMutator Executes all the buffered, asynchronous Mutation operations and waits until they @@ -5462,6 +5484,14 @@ getBufferedCounterForEachRegionServer() - Method in class org.apache.hadoop.hbase.client.HTableMultiplexer.HTableMultiplexerStatus +getBufferedMutator(TableName) - Method in interface org.apache.hadoop.hbase.client.AsyncConnection + +Retrieve an AsyncBufferedMutator for performing client-side buffering of writes. + +getBufferedMutator(TableName, ExecutorService) - Method in interface org.apache.hadoop.hbase.client.AsyncConnection + +Retrieve an AsyncBufferedMutator for performing client-side buffering of writes. + getBufferedMutator(TableName) - Method in interface org.apache.hadoop.hbase.client.Connection @@ -5471,6 +5501,14 @@ Retrieve a BufferedMutator for performing client-side buffering of writes. +getBufferedMutatorBuilder(TableName) - Method in interface org.apache.hadoop.hbase.client.AsyncConnection + +Returns an AsyncBufferedMutatorBuilder for creating AsyncBufferedMutator. + +getBufferedMutatorBuilder(TableName, ExecutorService) - Method in interface org.apache.hadoop.hbase.client.AsyncConnection + +Returns an AsyncBufferedMutatorBuilder for creating AsyncBufferedMutator. + getByteBuffer() - Method in class org.apache.hadoop.hbase.io.ByteBufferOutputStream This flips the underlying BB so be sure to use it _last_! @@ -5705,6 +5743,10 @@ getConfiguration() - Method in interface org.apache.hadoop.hbase.client.Admin +getConfiguration() - Method in interface org.apache.hadoop.hbase.client.AsyncBufferedMutator + +Returns the Configuration object used by this instance. + getConfiguration() - Method in interface org.apache.hadoop.hbase.client.AsyncConnection Returns the Configuration object used by this instance. 
@@ -6468,6 +6510,11 @@ getMutations() - Method in class org.apache.hadoop.hbase.client.RowMutations +getName() - Method in interface org.apache.hadoop.hbase.client.AsyncBufferedMutator + +Gets the fully qualified table name instance of the table that this + AsyncBufferedMutator writes to. + getName() - Method in interface org.apache.hadoop.hbase.client.AsyncTableBase Gets the fully qualified table name instance of this table. @@ -7847,6 +7894,10 @@ getWaitInterval() - Method in exception org.apache.hadoop.hbase.quotas.ThrottlingException +getWriteBufferSize() - Method in interface org.apache.hadoop.hbase.client.AsyncBufferedMutator + +Returns the maximum size in bytes of the write buffer. + getWriteBufferSize() - Method in interface org.apache.hadoop.hbase.client.BufferedMutator Returns the maximum size in bytes of the write buffer for this HTable. @@ -9667,7 +9718,7 @@ Deprecated. This is vestigial API. -isSameHostnameAndPort(ServerName, ServerName) - Static method in class org.apache.hadoop.hbase.ServerName +isSameAddress(ServerName, ServerName) -
[46/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/apidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html -- diff --git a/apidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html b/apidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html index d1710e6..70979e6 100644 --- a/apidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html +++ b/apidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html @@ -18,7 +18,7 @@ catch(err) { } //--> -var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":9,"i6":10,"i7":9,"i8":9,"i9":9,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":42,"i23":42,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":9}; +var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":9,"i6":9,"i7":10,"i8":9,"i9":9,"i10":9,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":42,"i24":42,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":9}; var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]}; var altColor = "altColor"; var rowColor = "rowColor"; @@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Public -public class TableDescriptorBuilder +public class TableDescriptorBuilder extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object @@ -216,117 +216,122 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? copy(TableDescriptordesc) +static TableDescriptor +copy(TableNamename, +TableDescriptordesc) + + TableDescriptorBuilder modifyColumnFamily(ColumnFamilyDescriptorfamily) - + static TableDescriptorBuilder newBuilder(TableDescriptordesc) Copy all configuration, values, families, and name from the input. 
- + static TableDescriptorBuilder newBuilder(TableNamename) - + static TableDescriptor parseFrom(byte[]pbBytes) The input should be created by toByteArray(org.apache.hadoop.hbase.client.TableDescriptor). - + TableDescriptorBuilder removeColumnFamily(byte[]name) - + TableDescriptorBuilder removeConfiguration(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringkey) - + TableDescriptorBuilder removeCoprocessor(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringclassName) - + TableDescriptorBuilder removeValue(byte[]key) - + TableDescriptorBuilder removeValue(Byteskey) - + TableDescriptorBuilder setCompactionEnabled(booleanisEnable) - + TableDescriptorBuilder setConfiguration(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringkey, http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringvalue) - + TableDescriptorBuilder setDurability(Durabilitydurability) - + TableDescriptorBuilder setFlushPolicyClassName(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringclazz) - + TableDescriptorBuilder setMaxFileSize(longmaxFileSize) - + TableDescriptorBuilder setMemStoreFlushSize(longmemstoreFlushSize) - + TableDescriptorBuilder setNormalizationEnabled(booleanisEnable) - + TableDescriptorBuilder setOwner(Userowner) Deprecated. - + TableDescriptorBuilder setOwnerString(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringownerString) Deprecated. 
- + TableDescriptorBuilder setPriority(intpriority) - + TableDescriptorBuilder setReadOnly(booleanreadOnly) - + TableDescriptorBuilder setRegionMemstoreReplication(booleanmemstoreReplication) - + TableDescriptorBuilder setRegionReplication(intregionReplication) - + TableDescriptorBuilder setRegionSplitPolicyClassName(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringclazz) - + TableDescriptorBuilder setValue(byte[]key, byte[]value) - + TableDescriptorBuilder setValue(Byteskey, Bytesvalue) - + static byte[] toByteArray(TableDescriptordesc) @@ -358,7 +363,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? LOG -public static finalorg.apache.commons.logging.Log LOG +public static finalorg.apache.commons.logging.Log LOG @@ -367,7 +372,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
[10/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/io/compress/class-use/Compression.Algorithm.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/io/compress/class-use/Compression.Algorithm.html b/devapidocs/org/apache/hadoop/hbase/io/compress/class-use/Compression.Algorithm.html index b70a167..4479b19 100644 --- a/devapidocs/org/apache/hadoop/hbase/io/compress/class-use/Compression.Algorithm.html +++ b/devapidocs/org/apache/hadoop/hbase/io/compress/class-use/Compression.Algorithm.html @@ -508,9 +508,9 @@ the order they are declared. static StoreFileWriter -MobUtils.createDelFileWriter(org.apache.hadoop.conf.Configurationconf, +MobUtils.createDelFileWriter(org.apache.hadoop.conf.Configurationconf, org.apache.hadoop.fs.FileSystemfs, - HColumnDescriptorfamily, + ColumnFamilyDescriptorfamily, http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringdate, org.apache.hadoop.fs.PathbasePath, longmaxKeyCount, @@ -523,42 +523,42 @@ the order they are declared. static StoreFileWriter -MobUtils.createWriter(org.apache.hadoop.conf.Configurationconf, +MobUtils.createWriter(org.apache.hadoop.conf.Configurationconf, org.apache.hadoop.fs.FileSystemfs, ColumnFamilyDescriptorfamily, -org.apache.hadoop.fs.Pathpath, +MobFileNamemobFileName, +org.apache.hadoop.fs.PathbasePath, longmaxKeyCount, Compression.Algorithmcompression, CacheConfigcacheConfig, Encryption.ContextcryptoContext, -ChecksumTypechecksumType, -intbytesPerChecksum, -intblocksize, -BloomTypebloomType, booleanisCompaction) Creates a writer for the mob file in temp directory. 
static StoreFileWriter -MobUtils.createWriter(org.apache.hadoop.conf.Configurationconf, +MobUtils.createWriter(org.apache.hadoop.conf.Configurationconf, org.apache.hadoop.fs.FileSystemfs, -HColumnDescriptorfamily, -MobFileNamemobFileName, -org.apache.hadoop.fs.PathbasePath, +ColumnFamilyDescriptorfamily, +org.apache.hadoop.fs.Pathpath, longmaxKeyCount, Compression.Algorithmcompression, CacheConfigcacheConfig, Encryption.ContextcryptoContext, +ChecksumTypechecksumType, +intbytesPerChecksum, +intblocksize, +BloomTypebloomType, booleanisCompaction) Creates a writer for the mob file in temp directory. static StoreFileWriter -MobUtils.createWriter(org.apache.hadoop.conf.Configurationconf, +MobUtils.createWriter(org.apache.hadoop.conf.Configurationconf, org.apache.hadoop.fs.FileSystemfs, -HColumnDescriptorfamily, +ColumnFamilyDescriptorfamily, http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringdate, org.apache.hadoop.fs.PathbasePath, longmaxKeyCount, @@ -572,9 +572,9 @@ the order they are declared. 
static StoreFileWriter -MobUtils.createWriter(org.apache.hadoop.conf.Configurationconf, +MobUtils.createWriter(org.apache.hadoop.conf.Configurationconf, org.apache.hadoop.fs.FileSystemfs, -HColumnDescriptorfamily, +ColumnFamilyDescriptorfamily, http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringdate, org.apache.hadoop.fs.PathbasePath, longmaxKeyCount, http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/io/crypto/class-use/Encryption.Context.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/io/crypto/class-use/Encryption.Context.html b/devapidocs/org/apache/hadoop/hbase/io/crypto/class-use/Encryption.Context.html index 4c3213b..cf269d0 100644 --- a/devapidocs/org/apache/hadoop/hbase/io/crypto/class-use/Encryption.Context.html +++ b/devapidocs/org/apache/hadoop/hbase/io/crypto/class-use/Encryption.Context.html @@ -304,9 +304,9 @@ static StoreFileWriter -MobUtils.createDelFileWriter(org.apache.hadoop.conf.Configurationconf, +MobUtils.createDelFileWriter(org.apache.hadoop.conf.Configurationconf, org.apache.hadoop.fs.FileSystemfs, - HColumnDescriptorfamily, + ColumnFamilyDescriptorfamily, http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in
[07/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/io/hfile/ResizableBlockCache.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/ResizableBlockCache.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/ResizableBlockCache.html index ec4d3c1..d8cba3d 100644 --- a/devapidocs/org/apache/hadoop/hbase/io/hfile/ResizableBlockCache.html +++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/ResizableBlockCache.html @@ -143,7 +143,7 @@ extends BlockCache -cacheBlock, cacheBlock, evictBlock, evictBlocksByHfileName, getBlock, getBlockCaches, < a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html#getBlockCount--">getBlockCount, getCurrentSize, getFreeSize, getStats, iterator, returnBlock, shutdown, size +cacheBlock, cacheBlock, evictBlock, evictBlocksByHfileName, getBlock, getBlockCaches, < a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html#getBlockCount--">getBlockCount, getCurrentDataSize, getCurrentSize, getDataBlockCount, getFreeSize, getStats, iterator, returnBlock, shutdown, size http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html index d1dd74d..05cc357 100644 --- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html +++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html @@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab"; -static class BucketCache.BucketEntry +static class BucketCache.BucketEntry extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object implements http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true; title="class or 
interface in java.io">Serializable Item in cache. We expect this to be where most memory goes. Java uses 8 @@ -288,7 +288,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Serializabl serialVersionUID -private static finallong serialVersionUID +private static finallong serialVersionUID See Also: Constant Field Values @@ -301,7 +301,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Serializabl COMPARATOR -static finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true; title="class or interface in java.util">ComparatorBucketCache.BucketEntry COMPARATOR +static finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true; title="class or interface in java.util">ComparatorBucketCache.BucketEntry COMPARATOR @@ -310,7 +310,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Serializabl offsetBase -privateint offsetBase +privateint offsetBase @@ -319,7 +319,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Serializabl length -privateint length +privateint length @@ -328,7 +328,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Serializabl offset1 -privatebyte offset1 +privatebyte offset1 @@ -337,7 +337,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Serializabl deserialiserIndex -byte deserialiserIndex +byte deserialiserIndex @@ -346,7 +346,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Serializabl accessCounter -private volatilelong accessCounter +private volatilelong accessCounter @@ -355,7 +355,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Serializabl priority -privateBlockPriority priority +privateBlockPriority priority @@ -364,7 +364,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Serializabl markedForEvict -private volatileboolean markedForEvict +private volatileboolean markedForEvict @@ -373,7 +373,7 @@ implements 
http://docs.oracle.com/javase/8/docs/api/java/io/Serializabl refCount -privatehttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true; title="class or interface in java.util.concurrent.atomic">AtomicInteger refCount +privatehttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true; title="class or interface in java.util.concurrent.atomic">AtomicInteger refCount @@ -382,7 +382,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Serializabl cachedTime -private finallong cachedTime +private finallong cachedTime Time this block was cached. Presumes we are created just before we are added to the cache. @@ -400,7 +400,7
[40/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/apidocs/src-html/org/apache/hadoop/hbase/client/TableSnapshotScanner.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/TableSnapshotScanner.html b/apidocs/src-html/org/apache/hadoop/hbase/client/TableSnapshotScanner.html index 9615ff3..de8a96f 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/client/TableSnapshotScanner.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/client/TableSnapshotScanner.html @@ -39,166 +39,165 @@ 031import org.apache.hadoop.fs.Path; 032import org.apache.hadoop.hbase.CellUtil; 033import org.apache.hadoop.hbase.HRegionInfo; -034import org.apache.hadoop.hbase.HTableDescriptor; -035import org.apache.hadoop.hbase.classification.InterfaceAudience; -036import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper; -037import org.apache.hadoop.hbase.util.FSUtils; -038 -039/** -040 * A Scanner which performs a scan over snapshot files. Using this class requires copying the -041 * snapshot to a temporary empty directory, which will copy the snapshot reference files into that -042 * directory. Actual data files are not copied. -043 * -044 * p -045 * This also allows one to run the scan from an -046 * online or offline hbase cluster. The snapshot files can be exported by using the -047 * {@link org.apache.hadoop.hbase.snapshot.ExportSnapshot} tool, -048 * to a pure-hdfs cluster, and this scanner can be used to -049 * run the scan directly over the snapshot files. The snapshot should not be deleted while there -050 * are open scanners reading from snapshot files. -051 * -052 * p -053 * An internal RegionScanner is used to execute the {@link Scan} obtained -054 * from the user for each region in the snapshot. -055 * p -056 * HBase owns all the data and snapshot files on the filesystem. Only the HBase user can read from -057 * snapshot files and data files. 
HBase also enforces security because all the requests are handled -058 * by the server layer, and the user cannot read from the data files directly. To read from snapshot -059 * files directly from the file system, the user who is running the MR job must have sufficient -060 * permissions to access snapshot and reference files. This means that to run mapreduce over -061 * snapshot files, the job has to be run as the HBase user or the user must have group or other -062 * priviledges in the filesystem (See HBASE-8369). Note that, given other users access to read from -063 * snapshot/data files will completely circumvent the access control enforced by HBase. -064 * @see org.apache.hadoop.hbase.mapreduce.TableSnapshotInputFormat -065 */ -066@InterfaceAudience.Public -067public class TableSnapshotScanner extends AbstractClientScanner { -068 -069 private static final Log LOG = LogFactory.getLog(TableSnapshotScanner.class); -070 -071 private Configuration conf; -072 private String snapshotName; -073 private FileSystem fs; -074 private Path rootDir; -075 private Path restoreDir; -076 private Scan scan; -077 private ArrayListHRegionInfo regions; -078 private HTableDescriptor htd; -079 -080 private ClientSideRegionScanner currentRegionScanner = null; -081 private int currentRegion = -1; -082 -083 /** -084 * Creates a TableSnapshotScanner. -085 * @param conf the configuration -086 * @param restoreDir a temporary directory to copy the snapshot files into. Current user should -087 * have write permissions to this directory, and this should not be a subdirectory of rootdir. -088 * The scanner deletes the contents of the directory once the scanner is closed. 
-089 * @param snapshotName the name of the snapshot to read from -090 * @param scan a Scan representing scan parameters -091 * @throws IOException in case of error -092 */ -093 public TableSnapshotScanner(Configuration conf, Path restoreDir, -094 String snapshotName, Scan scan) throws IOException { -095this(conf, FSUtils.getRootDir(conf), restoreDir, snapshotName, scan); -096 } -097 -098 /** -099 * Creates a TableSnapshotScanner. -100 * @param conf the configuration -101 * @param rootDir root directory for HBase. -102 * @param restoreDir a temporary directory to copy the snapshot files into. Current user should -103 * have write permissions to this directory, and this should not be a subdirectory of rootdir. -104 * The scanner deletes the contents of the directory once the scanner is closed. -105 * @param snapshotName the name of the snapshot to read from -106 * @param scan a Scan representing scan parameters -107 * @throws IOException in case of error -108 */ -109 public TableSnapshotScanner(Configuration conf, Path rootDir, -110 Path restoreDir, String snapshotName, Scan scan) throws IOException { -111this.conf = conf; -112this.snapshotName = snapshotName; -113this.rootDir = rootDir; -114// restoreDir will be
[44/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/apidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationException.html -- diff --git a/apidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationException.html b/apidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationException.html index d50d155..e3e8ae5 100644 --- a/apidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationException.html +++ b/apidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationException.html @@ -183,18 +183,12 @@ void -ReplicationAdmin.peerAdded(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringid) -Deprecated. - - - -void ReplicationAdmin.removePeerTableCFs(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringid, http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">MapTableName,? extends http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true; title="class or interface in java.util">Collectionhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringtableCfs) Deprecated. 
- + void ReplicationAdmin.removePeerTableCFs(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringid, http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringtableCf) http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/apidocs/overview-tree.html -- diff --git a/apidocs/overview-tree.html b/apidocs/overview-tree.html index 7a5e206..fa30006 100644 --- a/apidocs/overview-tree.html +++ b/apidocs/overview-tree.html @@ -716,6 +716,7 @@ org.apache.hadoop.hbase.client.AsyncAdmin org.apache.hadoop.hbase.client.AsyncAdminBuilder +org.apache.hadoop.hbase.client.AsyncBufferedMutatorBuilder org.apache.hadoop.hbase.client.AsyncTableBase org.apache.hadoop.hbase.client.AsyncTable @@ -728,11 +729,13 @@ java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/AutoCloseable.html?is-external=true; title="class or interface in java.lang">AutoCloseable org.apache.hadoop.hbase.client.Admin (also extends org.apache.hadoop.hbase.Abortable, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true; title="class or interface in java.io">Closeable) +org.apache.hadoop.hbase.client.AsyncBufferedMutator org.apache.hadoop.hbase.client.AsyncConnection org.apache.hadoop.hbase.client.BufferedMutator java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true; title="class or interface in java.io">Closeable org.apache.hadoop.hbase.client.Admin (also extends org.apache.hadoop.hbase.Abortable) +org.apache.hadoop.hbase.client.AsyncBufferedMutator org.apache.hadoop.hbase.client.AsyncConnection org.apache.hadoop.hbase.client.BufferedMutator org.apache.hadoop.io.Closeable http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/apidocs/src-html/org/apache/hadoop/hbase/HColumnDescriptor.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/HColumnDescriptor.html 
b/apidocs/src-html/org/apache/hadoop/hbase/HColumnDescriptor.html index 1198ee7..86163e2 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/HColumnDescriptor.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/HColumnDescriptor.html @@ -647,190 +647,187 @@ 639if (this == obj) { 640 return true; 641} -642if (obj == null) { -643 return false; +642if (obj instanceof HColumnDescriptor) { +643 return delegatee.equals(((HColumnDescriptor) obj).delegatee); 644} -645if (!(obj instanceof HColumnDescriptor)) { -646 return false; -647} -648return compareTo((HColumnDescriptor)obj) == 0; -649 } -650 -651 /** -652 * @see java.lang.Object#hashCode() -653 */ -654 @Override -655 public int hashCode() { -656return delegatee.hashCode(); -657 } -658 -659 @Override -660 public int compareTo(HColumnDescriptor other) { -661return delegatee.compareTo(other.delegatee); -662 } -663 -664 /** -665 * @return This instance serialized with pb with pb magic prefix -666 * @see #parseFrom(byte[]) -667 */ -668 public byte[] toByteArray() { -669return ColumnFamilyDescriptorBuilder.toByteArray(delegatee); -670 } -671 -672 /** -673 * @param bytes A pb serialized {@link HColumnDescriptor} instance with pb magic prefix -674 * @return An instance of {@link HColumnDescriptor} made from codebytes/code
[05/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html index c262a95..06e851c 100644 --- a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html +++ b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html @@ -18,7 +18,7 @@ catch(err) { } //--> -var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":9,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":9,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":9,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":9,"i109" :10,"i110":10,"i111":10,"i112":10,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10,"i122":10,"i123":10,"i124":9,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10,"i137":10,"i138":10,"i139":10,"i140":10,"i141":10,"i142":10,"i143":10,"i144":10,"i145":10,"i146":10,"i147":10,"i148":10,"i149":10,"i150":10,"i151":10,"i152":10,"i153":10,"i154":10,"i155":10,"i156":10,"i157":10,"i158":10,"i159":9}; +var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":9,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":9,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":9,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":9,"i109" :10,"i110":10,"i111":10,"i112":10,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10,"i122":10,"i123":10,"i124":9,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10,"i137":10,"i138":10,"i139":10,"i140":10,"i141":10,"i142":10,"i143":10,"i144":10,"i145":10,"i146":10,"i147":10,"i148":10,"i149":10,"i150":10,"i151":10,"i152":10,"i153":10,"i154":10,"i155":10,"i156":10,"i157":10,"i158":10,"i159":9}; var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]}; var altColor = "altColor"; var rowColor = "rowColor"; @@ -128,7 +128,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.LimitedPrivate(value="Tools") -public class HMaster +public class HMaster extends HRegionServer implements MasterServices HMaster is the "master server" for HBase. 
An HBase cluster has one active @@ -504,8 +504,8 @@ implements long -addColumn(TableNametableName, - HColumnDescriptorcolumnDescriptor, +addColumn(TableNametableName, + ColumnFamilyDescriptorcolumnDescriptor, longnonceGroup, longnonce) Add a new column to an existing table @@ -548,31 +548,31 @@ implements private void -checkClassLoading(org.apache.hadoop.conf.Configurationconf, - HTableDescriptorhtd) +checkClassLoading(org.apache.hadoop.conf.Configurationconf, + TableDescriptorhtd) private void -checkCompactionPolicy(org.apache.hadoop.conf.Configurationconf, - HTableDescriptorhtd) +checkCompactionPolicy(org.apache.hadoop.conf.Configurationconf, + TableDescriptorhtd) private void -checkCompression(HColumnDescriptorhcd) +checkCompression(ColumnFamilyDescriptorhcd) private void -checkCompression(HTableDescriptorhtd) +checkCompression(TableDescriptorhtd) private void
hbase-site git commit: INFRA-10751 Empty commit
Repository: hbase-site Updated Branches: refs/heads/asf-site 4f1f2a0b1 -> 0c22829dd INFRA-10751 Empty commit Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/0c22829d Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/0c22829d Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/0c22829d Branch: refs/heads/asf-site Commit: 0c22829dd6b95de426d633baae6c7f90cd227496 Parents: 4f1f2a0 Author: jenkinsAuthored: Thu Aug 24 15:10:59 2017 + Committer: jenkins Committed: Thu Aug 24 15:10:59 2017 + -- --
[15/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/client/class-use/ColumnFamilyDescriptor.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/ColumnFamilyDescriptor.html b/devapidocs/org/apache/hadoop/hbase/client/class-use/ColumnFamilyDescriptor.html index 0f648f8..c45e00c 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/class-use/ColumnFamilyDescriptor.html +++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/ColumnFamilyDescriptor.html @@ -117,9 +117,17 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. +org.apache.hadoop.hbase.master.procedure + + + org.apache.hadoop.hbase.mob + +org.apache.hadoop.hbase.mob.compactions + + org.apache.hadoop.hbase.regionserver @@ -629,6 +637,19 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. Uses of ColumnFamilyDescriptor in org.apache.hadoop.hbase.master + +Fields in org.apache.hadoop.hbase.master with type parameters of type ColumnFamilyDescriptor + +Modifier and Type +Field and Description + + + +private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListColumnFamilyDescriptor +MasterMobCompactionThread.CompactionRunner.hcds + + + Methods in org.apache.hadoop.hbase.master with parameters of type ColumnFamilyDescriptor @@ -637,55 +658,194 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. 
+long +HMaster.addColumn(TableNametableName, + ColumnFamilyDescriptorcolumnDescriptor, + longnonceGroup, + longnonce) + + +long +MasterServices.addColumn(TableNametableName, + ColumnFamilyDescriptorcolumn, + longnonceGroup, + longnonce) +Add a new column to an existing table + + + +private void +HMaster.checkCompression(ColumnFamilyDescriptorhcd) + + +private void +HMaster.checkEncryption(org.apache.hadoop.conf.Configurationconf, + ColumnFamilyDescriptorhcd) + + +private void +HMaster.checkReplicationScope(ColumnFamilyDescriptorhcd) + + +long +HMaster.modifyColumn(TableNametableName, +ColumnFamilyDescriptordescriptor, +longnonceGroup, +longnonce) + + +long +MasterServices.modifyColumn(TableNametableName, +ColumnFamilyDescriptordescriptor, +longnonceGroup, +longnonce) +Modify the column descriptor of an existing column in an existing table + + + void MasterCoprocessorHost.postAddColumn(TableNametableName, ColumnFamilyDescriptorcolumnFamily) - + void MasterCoprocessorHost.postCompletedAddColumnFamilyAction(TableNametableName, ColumnFamilyDescriptorcolumnFamily, Useruser) - + void MasterCoprocessorHost.postCompletedModifyColumnFamilyAction(TableNametableName, ColumnFamilyDescriptorcolumnFamily, Useruser) - + void MasterCoprocessorHost.postModifyColumn(TableNametableName, ColumnFamilyDescriptorcolumnFamily) - + boolean MasterCoprocessorHost.preAddColumn(TableNametableName, ColumnFamilyDescriptorcolumnFamily) - + boolean MasterCoprocessorHost.preAddColumnFamilyAction(TableNametableName, ColumnFamilyDescriptorcolumnFamily, Useruser) - + boolean MasterCoprocessorHost.preModifyColumn(TableNametableName, ColumnFamilyDescriptorcolumnFamily) - + boolean MasterCoprocessorHost.preModifyColumnFamilyAction(TableNametableName, ColumnFamilyDescriptorcolumnFamily, Useruser) - + private static ImmutableHColumnDescriptor MasterCoprocessorHost.toImmutableHColumnDescriptor(ColumnFamilyDescriptordesc) + +Method parameters in org.apache.hadoop.hbase.master with type arguments of type 
ColumnFamilyDescriptor + +Modifier and Type +Method and Description + + + +void +MasterMobCompactionThread.requestMobCompaction(org.apache.hadoop.conf.Configurationconf, +org.apache.hadoop.fs.FileSystemfs, +TableNametableName, +http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListColumnFamilyDescriptorcolumns, +booleanallFiles) +Requests mob compaction + + + +void +HMaster.requestMobCompaction(TableNametableName, +http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListColumnFamilyDescriptorcolumns, +booleanallFiles) +Requests mob compaction. + + + + + +Constructor parameters in org.apache.hadoop.hbase.master with type arguments of type
[28/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html index ebf172f..d7cee89 100644 --- a/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html +++ b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html @@ -106,22 +106,6 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. -org.apache.hadoop.hbase.master - - - -org.apache.hadoop.hbase.master.procedure - - - -org.apache.hadoop.hbase.mob - - - -org.apache.hadoop.hbase.mob.compactions - - - org.apache.hadoop.hbase.replication Multi Cluster Replication @@ -732,339 +716,6 @@ service. - - - -Uses of HColumnDescriptor in org.apache.hadoop.hbase.master - -Fields in org.apache.hadoop.hbase.master with type parameters of type HColumnDescriptor - -Modifier and Type -Field and Description - - - -private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListHColumnDescriptor -MasterMobCompactionThread.CompactionRunner.hcds - - - - -Methods in org.apache.hadoop.hbase.master with parameters of type HColumnDescriptor - -Modifier and Type -Method and Description - - - -long -HMaster.addColumn(TableNametableName, - HColumnDescriptorcolumnDescriptor, - longnonceGroup, - longnonce) - - -long -MasterServices.addColumn(TableNametableName, - HColumnDescriptorcolumn, - longnonceGroup, - longnonce) -Add a new column to an existing table - - - -private void -HMaster.checkCompression(HColumnDescriptorhcd) - - -private void -HMaster.checkEncryption(org.apache.hadoop.conf.Configurationconf, - HColumnDescriptorhcd) - - -private void -HMaster.checkReplicationScope(HColumnDescriptorhcd) - - -long -HMaster.modifyColumn(TableNametableName, -HColumnDescriptordescriptor, -longnonceGroup, -longnonce) - - 
-long -MasterServices.modifyColumn(TableNametableName, -HColumnDescriptordescriptor, -longnonceGroup, -longnonce) -Modify the column descriptor of an existing column in an existing table - - - - - -Method parameters in org.apache.hadoop.hbase.master with type arguments of type HColumnDescriptor - -Modifier and Type -Method and Description - - - -void -MasterMobCompactionThread.requestMobCompaction(org.apache.hadoop.conf.Configurationconf, -org.apache.hadoop.fs.FileSystemfs, -TableNametableName, -http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListHColumnDescriptorcolumns, -booleanallFiles) -Requests mob compaction - - - -void -HMaster.requestMobCompaction(TableNametableName, -http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListHColumnDescriptorcolumns, -booleanallFiles) -Requests mob compaction. - - - - - -Constructor parameters in org.apache.hadoop.hbase.master with type arguments of type HColumnDescriptor - -Constructor and Description - - - -CompactionRunner(org.apache.hadoop.fs.FileSystemfs, -TableNametableName, -http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListHColumnDescriptorhcds, -booleanallFiles, -http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true; title="class or interface in java.util.concurrent">ExecutorServicepool) - - - - - - - -Uses of HColumnDescriptor in org.apache.hadoop.hbase.master.procedure - -Fields in org.apache.hadoop.hbase.master.procedure declared as HColumnDescriptor - -Modifier and Type -Field and Description - - - -private HColumnDescriptor -ModifyColumnFamilyProcedure.cfDescriptor - - -private HColumnDescriptor -AddColumnFamilyProcedure.cfDescriptor - - - - -Constructors in org.apache.hadoop.hbase.master.procedure with parameters of type HColumnDescriptor - -Constructor and 
Description - - - -AddColumnFamilyProcedure(MasterProcedureEnvenv, -TableNametableName, -HColumnDescriptorcfDescriptor) - - -AddColumnFamilyProcedure(MasterProcedureEnvenv, -TableNametableName, -HColumnDescriptorcfDescriptor, -ProcedurePrepareLatchlatch) - - -ModifyColumnFamilyProcedure(MasterProcedureEnvenv, - TableNametableName, - HColumnDescriptorcfDescriptor) - -
[01/51] [partial] hbase-site git commit: Published site at .
Repository: hbase-site Updated Branches: refs/heads/asf-site 5bd98ad47 -> 4f1f2a0b1 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html b/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html index 856ba9b..9e94483 100644 --- a/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html +++ b/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html @@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Private -public interface MasterServices +public interface MasterServices extends Server Services Master supplies @@ -140,8 +140,8 @@ extends long -addColumn(TableNametableName, - HColumnDescriptorcolumn, +addColumn(TableNametableName, + ColumnFamilyDescriptorcolumn, longnonceGroup, longnonce) Add a new column to an existing table @@ -166,13 +166,13 @@ extends long -createSystemTable(HTableDescriptorhTableDescriptor) +createSystemTable(TableDescriptortableDescriptor) Create a system table using the given table definition. 
long -createTable(HTableDescriptordesc, +createTable(TableDescriptordesc, byte[][]splitKeys, longnonceGroup, longnonce) @@ -403,8 +403,8 @@ extends long -modifyColumn(TableNametableName, -HColumnDescriptordescriptor, +modifyColumn(TableNametableName, +ColumnFamilyDescriptordescriptor, longnonceGroup, longnonce) Modify the column descriptor of an existing column in an existing table @@ -412,8 +412,8 @@ extends long -modifyTable(TableNametableName, - HTableDescriptordescriptor, +modifyTable(TableNametableName, + TableDescriptordescriptor, longnonceGroup, longnonce) Modify the descriptor of an existing table @@ -510,7 +510,7 @@ extends getSnapshotManager -SnapshotManagergetSnapshotManager() +SnapshotManagergetSnapshotManager() Returns: the underlying snapshot manager @@ -523,7 +523,7 @@ extends getMasterProcedureManagerHost -MasterProcedureManagerHostgetMasterProcedureManagerHost() +MasterProcedureManagerHostgetMasterProcedureManagerHost() Returns: the underlying MasterProcedureManagerHost @@ -536,7 +536,7 @@ extends getClusterSchema -ClusterSchemagetClusterSchema() +ClusterSchemagetClusterSchema() Returns: Master's instance of ClusterSchema @@ -549,7 +549,7 @@ extends getAssignmentManager -AssignmentManagergetAssignmentManager() +AssignmentManagergetAssignmentManager() Returns: Master's instance of the AssignmentManager @@ -562,7 +562,7 @@ extends getMasterFileSystem -MasterFileSystemgetMasterFileSystem() +MasterFileSystemgetMasterFileSystem() Returns: Master's filesystem MasterFileSystem utility class. @@ -575,7 +575,7 @@ extends getMasterWalManager -MasterWalManagergetMasterWalManager() +MasterWalManagergetMasterWalManager() Returns: Master's WALs MasterWalManager utility class. @@ -588,7 +588,7 @@ extends getServerManager -ServerManagergetServerManager() +ServerManagergetServerManager() Returns: Master's ServerManager instance. 
@@ -601,7 +601,7 @@ extends getExecutorService -ExecutorServicegetExecutorService() +ExecutorServicegetExecutorService() Returns: Master's instance of ExecutorService @@ -614,7 +614,7 @@ extends getTableStateManager -TableStateManagergetTableStateManager() +TableStateManagergetTableStateManager() Returns: Master's instance of TableStateManager @@ -627,7 +627,7 @@ extends getMasterCoprocessorHost -MasterCoprocessorHostgetMasterCoprocessorHost() +MasterCoprocessorHostgetMasterCoprocessorHost() Returns: Master's instance of MasterCoprocessorHost @@ -640,7 +640,7 @@ extends getMasterQuotaManager -MasterQuotaManagergetMasterQuotaManager() +MasterQuotaManagergetMasterQuotaManager() Returns: Master's instance of MasterQuotaManager @@ -653,7 +653,7 @@ extends getRegionNormalizer -RegionNormalizergetRegionNormalizer() +RegionNormalizergetRegionNormalizer() Returns: Master's instance of RegionNormalizer @@ -666,7 +666,7 @@ extends getCatalogJanitor -CatalogJanitorgetCatalogJanitor() +CatalogJanitorgetCatalogJanitor() Returns: Master's instance of CatalogJanitor @@ -679,7 +679,7 @@ extends getMasterProcedureExecutor -ProcedureExecutorMasterProcedureEnvgetMasterProcedureExecutor() +ProcedureExecutorMasterProcedureEnvgetMasterProcedureExecutor() Returns: Master's instance of ProcedureExecutor @@ -692,7 +692,7 @@ extends getInitializedEvent -ProcedureEventgetInitializedEvent() +ProcedureEventgetInitializedEvent() Returns: Tripped when Master has finished initialization. @@ -705,7 +705,7 @@ extends getMasterMetrics -MetricsMastergetMasterMetrics() +MetricsMastergetMasterMetrics() Returns: Master's
[26/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html b/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html index a3bb9c4..4bd01f9 100644 --- a/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html +++ b/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html @@ -91,107 +91,63 @@ -org.apache.hadoop.hbase.backup.util - - - org.apache.hadoop.hbase.client Provides HBase Client - + org.apache.hadoop.hbase.constraint Restrict the domain of a data attribute, often times to fulfill business rules/requirements. - + org.apache.hadoop.hbase.coprocessor Table of Contents - + org.apache.hadoop.hbase.mapred Provides HBase http://wiki.apache.org/hadoop/HadoopMapReduce;>MapReduce Input/OutputFormats, a table indexing MapReduce job, and utility methods. - + org.apache.hadoop.hbase.mapreduce Provides HBase http://wiki.apache.org/hadoop/HadoopMapReduce;>MapReduce Input/OutputFormats, a table indexing MapReduce job, and utility methods. - -org.apache.hadoop.hbase.master - - - -org.apache.hadoop.hbase.master.assignment - - - -org.apache.hadoop.hbase.master.balancer - - - -org.apache.hadoop.hbase.master.procedure - - - -org.apache.hadoop.hbase.master.snapshot - - -org.apache.hadoop.hbase.mob - - - org.apache.hadoop.hbase.quotas - -org.apache.hadoop.hbase.regionserver - - -org.apache.hadoop.hbase.regionserver.handler - - - org.apache.hadoop.hbase.rest HBase REST - -org.apache.hadoop.hbase.rest.client - - -org.apache.hadoop.hbase.rest.model +org.apache.hadoop.hbase.rest.client -org.apache.hadoop.hbase.rsgroup +org.apache.hadoop.hbase.rest.model -org.apache.hadoop.hbase.snapshot +org.apache.hadoop.hbase.rsgroup org.apache.hadoop.hbase.tool - -org.apache.hadoop.hbase.util - - @@ -257,25 +213,17 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. 
HTableDescriptor -TableDescriptors.get(TableNametableName) - - -HTableDescriptor HTableDescriptor.modifyFamily(HColumnDescriptorfamily) Deprecated. Modifies the existing column family. - + static HTableDescriptor HTableDescriptor.parseFrom(byte[]bytes) Deprecated. - -HTableDescriptor -TableDescriptors.remove(TableNametablename) - HTableDescriptor HTableDescriptor.setCompactionEnabled(booleanisEnable) @@ -403,33 +351,6 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. -Methods in org.apache.hadoop.hbase that return types with arguments of type HTableDescriptor - -Modifier and Type -Method and Description - - - -http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,HTableDescriptor -TableDescriptors.getAll() -Get Map of all HTableDescriptors. - - - -http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,HTableDescriptor -TableDescriptors.getAllDescriptors() -Get Map of all TableDescriptors. - - - -http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,HTableDescriptor -TableDescriptors.getByNamespace(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringname) -Get Map of all NamespaceDescriptors for a given namespace. 
- - - - - Methods in org.apache.hadoop.hbase with parameters of type HTableDescriptor Modifier and Type @@ -437,12 +358,6 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. -void -TableDescriptors.add(HTableDescriptorhtd) -Add or update descriptor - - - int HTableDescriptor.compareTo(HTableDescriptorother) Deprecated. @@ -500,59 +415,6 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. - - - -Uses of HTableDescriptor in org.apache.hadoop.hbase.backup.util - -Methods in org.apache.hadoop.hbase.backup.util that return HTableDescriptor - -Modifier and Type -Method and Description - - - -(package private) HTableDescriptor -RestoreTool.getTableDesc(TableNametableName) -Get table descriptor - - - -private HTableDescriptor -RestoreTool.getTableDescriptor(org.apache.hadoop.fs.FileSystemfileSys, - TableNametableName, -
[37/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/checkstyle-aggregate.html -- diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html index cc1bf4d..fe5256b 100644 --- a/checkstyle-aggregate.html +++ b/checkstyle-aggregate.html @@ -7,7 +7,7 @@ - + Apache HBase Checkstyle Results @@ -286,10 +286,10 @@ Warnings Errors -2026 +2030 0 0 -12821 +12845 Files @@ -957,7 +957,7 @@ org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java 0 0 -54 +55 org/apache/hadoop/hbase/client/CompactType.java 0 @@ -1002,7 +1002,7 @@ org/apache/hadoop/hbase/client/HBaseAdmin.java 0 0 -103 +105 org/apache/hadoop/hbase/client/HRegionLocator.java 0 @@ -1292,7 +1292,7 @@ org/apache/hadoop/hbase/client/TableDescriptorBuilder.java 0 0 -43 +44 org/apache/hadoop/hbase/client/TableSnapshotScanner.java 0 @@ -1397,7 +1397,7 @@ org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java 0 0 -44 +37 org/apache/hadoop/hbase/client/replication/ReplicationSerDeHelper.java 0 @@ -1747,12 +1747,12 @@ org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.java 0 0 -30 +28 org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java 0 0 -8 +9 org/apache/hadoop/hbase/favored/FavoredNodesManager.java 0 @@ -2237,7 +2237,7 @@ org/apache/hadoop/hbase/io/hfile/BlockCache.java 0 0 -2 +3 org/apache/hadoop/hbase/io/hfile/BlockCacheKey.java 0 @@ -2367,7 +2367,7 @@ org/apache/hadoop/hbase/io/hfile/LruBlockCache.java 0 0 -10 +13 org/apache/hadoop/hbase/io/hfile/LruCachedBlock.java 0 @@ -3057,7 +3057,7 @@ org/apache/hadoop/hbase/master/CatalogJanitor.java 0 0 -28 +27 org/apache/hadoop/hbase/master/ClusterSchemaServiceImpl.java 0 @@ -3077,7 +3077,7 @@ org/apache/hadoop/hbase/master/HMaster.java 0 0 -252 +254 org/apache/hadoop/hbase/master/HMasterCommandLine.java 0 @@ -3107,522 +3107,532 @@ org/apache/hadoop/hbase/master/MasterFileSystem.java 0 0 -21 +23 org/apache/hadoop/hbase/master/MasterMetaBootstrap.java 0 0 1 +org/apache/hadoop/hbase/master/MasterMobCompactionThread.java +0 
+0 +1 + org/apache/hadoop/hbase/master/MasterRpcServices.java 0 0 20 - + org/apache/hadoop/hbase/master/MasterServices.java 0 0 43 - + org/apache/hadoop/hbase/master/MasterStatusServlet.java 0 0 4 - + org/apache/hadoop/hbase/master/MasterWalManager.java 0 0 2 - + org/apache/hadoop/hbase/master/MetricsAssignmentManager.java 0 0 5 - + org/apache/hadoop/hbase/master/MetricsAssignmentManagerSource.java 0 0 1 - + org/apache/hadoop/hbase/master/MetricsAssignmentManagerSourceImpl.java 0 0 4 - + org/apache/hadoop/hbase/master/MetricsMaster.java 0 0 4 - + org/apache/hadoop/hbase/master/MetricsMasterProcSource.java 0 0 1 - + org/apache/hadoop/hbase/master/MetricsMasterQuotaSourceImpl.java 0 0 1 - + org/apache/hadoop/hbase/master/MetricsMasterSource.java 0 0 1 - + org/apache/hadoop/hbase/master/MetricsMasterSourceImpl.java 0 0 2 - + org/apache/hadoop/hbase/master/RackManager.java 0 0 2 - + org/apache/hadoop/hbase/master/RegionPlacementMaintainer.java 0 0 198 - + org/apache/hadoop/hbase/master/RegionPlan.java 0 0 3 - + org/apache/hadoop/hbase/master/RegionState.java 0 0 86 - + org/apache/hadoop/hbase/master/ServerListener.java 0 0 1 - + org/apache/hadoop/hbase/master/ServerManager.java 0 0 -25 - +24 + org/apache/hadoop/hbase/master/SnapshotOfRegionAssignmentFromMeta.java 0 0 15 - + org/apache/hadoop/hbase/master/SnapshotSentinel.java 0 0 1 - + org/apache/hadoop/hbase/master/SplitLogManager.java 0 0 23 - + org/apache/hadoop/hbase/master/TableNamespaceManager.java 0 0 4 - + org/apache/hadoop/hbase/master/TableStateManager.java 0 0 9 - + org/apache/hadoop/hbase/master/assignment/AssignProcedure.java 0 0 4 - + org/apache/hadoop/hbase/master/assignment/AssignmentManager.java 0 0 33 - + org/apache/hadoop/hbase/master/assignment/GCMergedRegionsProcedure.java 0 0 14 - + org/apache/hadoop/hbase/master/assignment/GCRegionProcedure.java 0 0 32 - + org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java 0 0 85 - + 
org/apache/hadoop/hbase/master/assignment/RegionStateStore.java 0 0 11 - + org/apache/hadoop/hbase/master/assignment/RegionStates.java 0 0 22 - + org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java 0 0 5 - + org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java 0 0 81 - + org/apache/hadoop/hbase/master/assignment/UnassignProcedure.java 0 0 9 - + org/apache/hadoop/hbase/master/assignment/Util.java 0 0 1 - + org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java 0 0 67 - + org/apache/hadoop/hbase/master/balancer/ClusterLoadState.java 0 0 1 - + org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java 0 0 4 - +
[47/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/apidocs/org/apache/hadoop/hbase/client/AsyncBufferedMutator.html -- diff --git a/apidocs/org/apache/hadoop/hbase/client/AsyncBufferedMutator.html b/apidocs/org/apache/hadoop/hbase/client/AsyncBufferedMutator.html new file mode 100644 index 000..ccea7d5 --- /dev/null +++ b/apidocs/org/apache/hadoop/hbase/client/AsyncBufferedMutator.html @@ -0,0 +1,359 @@ +http://www.w3.org/TR/html4/loose.dtd;> + + + + + +AsyncBufferedMutator (Apache HBase 3.0.0-SNAPSHOT API) + + + + + +var methods = {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6}; +var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],4:["t3","Abstract Methods"]}; +var altColor = "altColor"; +var rowColor = "rowColor"; +var tableTab = "tableTab"; +var activeTableTab = "activeTableTab"; + + +JavaScript is disabled on your browser. + + + + + +Skip navigation links + + + + +Overview +Package +Class +Use +Tree +Deprecated +Index +Help + + + + +PrevClass +NextClass + + +Frames +NoFrames + + +AllClasses + + + + + + + +Summary: +Nested| +Field| +Constr| +Method + + +Detail: +Field| +Constr| +Method + + + + + + + + +org.apache.hadoop.hbase.client +Interface AsyncBufferedMutator + + + + + + +All Superinterfaces: +http://docs.oracle.com/javase/8/docs/api/java/lang/AutoCloseable.html?is-external=true; title="class or interface in java.lang">AutoCloseable, http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true; title="class or interface in java.io">Closeable + + + +@InterfaceAudience.Public +public interface AsyncBufferedMutator +extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true; title="class or interface in java.io">Closeable +Used to communicate with a single HBase table in batches. Obtain an instance from a + AsyncConnection and call close() afterwards. + + The implementation is required to be thread safe. 
+ + + + + + + + + + + +Method Summary + +All MethodsInstance MethodsAbstract Methods + +Modifier and Type +Method and Description + + +void +close() +Performs a flush() and releases any resources held. + + + +void +flush() +Executes all the buffered, asynchronous operations. + + + +org.apache.hadoop.conf.Configuration +getConfiguration() +Returns the Configuration object used by this instance. + + + +TableName +getName() +Gets the fully qualified table name instance of the table that this + AsyncBufferedMutator writes to. + + + +long +getWriteBufferSize() +Returns the maximum size in bytes of the write buffer. + + + +http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true; title="class or interface in java.lang">Void +mutate(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">List? extends Mutationmutations) +Send some Mutations to the table. + + + +http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true; title="class or interface in java.lang">Void +mutate(Mutationmutation) +Sends a Mutation to the table. + + + + + + + + + + + + + + + + +Method Detail + + + + + +getName +TableNamegetName() +Gets the fully qualified table name instance of the table that this + AsyncBufferedMutator writes to. + + + + + + + +getConfiguration +org.apache.hadoop.conf.ConfigurationgetConfiguration() +Returns the Configuration object used by this instance. 
+ + The reference returned is not a copy, so any change made to it will affect this instance. + + + + + + + +mutate +http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true; title="class or interface in java.lang">Voidmutate(Mutationmutation) +Sends a Mutation to the table. The mutations will be buffered and sent over the wire as + part of a batch. Currently only supports Put and Delete mutations. + +Parameters:
[03/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/master/MasterFileSystem.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/master/MasterFileSystem.html b/devapidocs/org/apache/hadoop/hbase/master/MasterFileSystem.html index 3e08342..471543f 100644 --- a/devapidocs/org/apache/hadoop/hbase/master/MasterFileSystem.html +++ b/devapidocs/org/apache/hadoop/hbase/master/MasterFileSystem.html @@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Private -public class MasterFileSystem +public class MasterFileSystem extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object This class abstracts a bunch of operations the HMaster needs to interact with the underlying file system like creating the initial layout, checking file @@ -313,8 +313,8 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? logFileSystemState(org.apache.commons.logging.Loglog) -static void -setInfoFamilyCachingForMeta(HTableDescriptormetaDescriptor, +static TableDescriptor +setInfoFamilyCachingForMeta(TableDescriptormetaDescriptor, booleanb) Enable in memory caching for hbase:meta @@ -351,7 +351,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? LOG -private static finalorg.apache.commons.logging.Log LOG +private static finalorg.apache.commons.logging.Log LOG @@ -360,7 +360,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? 
HBASE_DIR_PERMS -public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String HBASE_DIR_PERMS +public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String HBASE_DIR_PERMS Parameter name for HBase instance root directory permission See Also: @@ -374,7 +374,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? HBASE_WAL_DIR_PERMS -public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String HBASE_WAL_DIR_PERMS +public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String HBASE_WAL_DIR_PERMS Parameter name for HBase WAL directory permission See Also: @@ -388,7 +388,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? conf -private finalorg.apache.hadoop.conf.Configuration conf +private finalorg.apache.hadoop.conf.Configuration conf @@ -397,7 +397,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? clusterId -privateClusterId clusterId +privateClusterId clusterId @@ -406,7 +406,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? fs -private finalorg.apache.hadoop.fs.FileSystem fs +private finalorg.apache.hadoop.fs.FileSystem fs @@ -415,7 +415,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? walFs -private finalorg.apache.hadoop.fs.FileSystem walFs +private finalorg.apache.hadoop.fs.FileSystem walFs @@ -424,7 +424,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? rootdir -private finalorg.apache.hadoop.fs.Path rootdir +private finalorg.apache.hadoop.fs.Path rootdir @@ -433,7 +433,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? 
tempdir -private finalorg.apache.hadoop.fs.Path tempdir +private finalorg.apache.hadoop.fs.Path tempdir @@ -442,7 +442,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? walRootDir -private finalorg.apache.hadoop.fs.Path walRootDir +private finalorg.apache.hadoop.fs.Path walRootDir @@ -451,7 +451,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? secureRootSubDirPerms -private finalorg.apache.hadoop.fs.permission.FsPermission secureRootSubDirPerms +private finalorg.apache.hadoop.fs.permission.FsPermission secureRootSubDirPerms @@ -460,7 +460,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? secureRootFilePerms -private finalorg.apache.hadoop.fs.permission.FsPermission secureRootFilePerms +private finalorg.apache.hadoop.fs.permission.FsPermission secureRootFilePerms @@ -469,7 +469,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? HiddenDirPerms -private finalorg.apache.hadoop.fs.permission.FsPermission HiddenDirPerms +private finalorg.apache.hadoop.fs.permission.FsPermission HiddenDirPerms @@ -478,7 +478,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? isSecurityEnabled
[19/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html index b516ead..f5bfbfd 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html +++ b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html @@ -18,7 +18,7 @@ catch(err) { } //--> -var methods = {"i0":10,"i1":10,"i2":10,"i3":42,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":9,"i11":10,"i12":10,"i13":10,"i14":41,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":42,"i23":42,"i24":42,"i25":42,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":42,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":9,"i79":10,"i80":10,"i81":9,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":41,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i109 
":10,"i110":10,"i111":10,"i112":9,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10,"i122":10,"i123":10,"i124":10,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10,"i137":10,"i138":10,"i139":10,"i140":10,"i141":10,"i142":10,"i143":10,"i144":10,"i145":10,"i146":10,"i147":10,"i148":10,"i149":10,"i150":10,"i151":10,"i152":10,"i153":10,"i154":10,"i155":10,"i156":10,"i157":10,"i158":10,"i159":10,"i160":10,"i161":10,"i162":10,"i163":10,"i164":10,"i165":10,"i166":10,"i167":10,"i168":10,"i169":10,"i170":10,"i171":10,"i172":10,"i173":10,"i174":42,"i175":10,"i176":10,"i177":10,"i178":42,"i179":10,"i180":10,"i181":10,"i182":10,"i183":10,"i184":10,"i185":10,"i186":10,"i187":10,"i188":10,"i189":10,"i190":10,"i191":10,"i192":10,"i193":10,"i194":10,"i195":10,"i196":10,"i197":10,"i198":10,"i199":10,"i200":42,"i201":10,"i202":10,"i203":10,"i204":10,"i205":10,"i206":10,"i207":10,"i208":10,"i2 09":10,"i210":10,"i211":10,"i212":10,"i213":10,"i214":10,"i215":10,"i216":10,"i217":10,"i218":10,"i219":10,"i220":10,"i221":10,"i222":10,"i223":10,"i224":10,"i225":10,"i226":10,"i227":10,"i228":10,"i229":10,"i230":10,"i231":10,"i232":10,"i233":10}; +var methods = 
{"i0":10,"i1":10,"i2":10,"i3":42,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":9,"i11":10,"i12":10,"i13":10,"i14":41,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":42,"i23":42,"i24":42,"i25":42,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":42,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":9,"i79":10,"i80":10,"i81":9,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":41,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i109 ":10,"i110":10,"i111":10,"i112":9,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10,"i122":10,"i123":10,"i124":10,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10,"i137":10,"i138":10,"i139":10,"i140":10,"i141":10,"i142":10,"i143":10,"i144":10,"i145":10,"i146":10,"i147":10,"i148":10,"i149":10,"i150":10,"i151":10,"i152":10,"i153":10,"i154":10,"i155":10,"i156":10,"i157":10,"i158":10,"i159":10,"i160":10,"i161":10,"i162":10,"i163":10,"i164":10,"i165":10,"i166":10,"i167":10,"i168":10,"i169":10,"i170":10,"i171":10,"i172":10,"i173":42,"i174":10,"i175":10,"i176":10,"i177":42,"i178":10,"i179":10,"i180":10,"i181":10,"i182":10,"i183":10,"i184":10,"i185":10,"i186":10,"i187":10,"i188":10,"i189":10,"i190":10,"i191":10,"i192":10,"i193":10,"i194":10,"i195":10,"i196":10,"i197":10,"i198":10,"i199":42,"i200":10,"i201":10,"i202":10,"i203":10,"i204":10,"i205":10,"i206":10,"i207":10,"i208":10,"i2
[13/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptorBuilder.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptorBuilder.html b/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptorBuilder.html index 040530a..325c596 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptorBuilder.html +++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptorBuilder.html @@ -88,6 +88,18 @@ Provides HBase Client + +org.apache.hadoop.hbase.master + + + +org.apache.hadoop.hbase.regionserver + + + +org.apache.hadoop.hbase.util + + @@ -231,6 +243,92 @@ + + + +Uses of TableDescriptorBuilder in org.apache.hadoop.hbase.master + +Methods in org.apache.hadoop.hbase.master that return types with arguments of type TableDescriptorBuilder + +Modifier and Type +Method and Description + + + +protected http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true; title="class or interface in java.util.function">FunctionTableDescriptorBuilder,TableDescriptorBuilder +HMaster.getMetaTableObserver() + + +protected http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true; title="class or interface in java.util.function">FunctionTableDescriptorBuilder,TableDescriptorBuilder +HMaster.getMetaTableObserver() + + + + + + + +Uses of TableDescriptorBuilder in org.apache.hadoop.hbase.regionserver + +Methods in org.apache.hadoop.hbase.regionserver that return types with arguments of type TableDescriptorBuilder + +Modifier and Type +Method and Description + + + +protected http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true; title="class or interface in java.util.function">FunctionTableDescriptorBuilder,TableDescriptorBuilder +HRegionServer.getMetaTableObserver() + + +protected 
http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true; title="class or interface in java.util.function">FunctionTableDescriptorBuilder,TableDescriptorBuilder +HRegionServer.getMetaTableObserver() + + + + + + + +Uses of TableDescriptorBuilder in org.apache.hadoop.hbase.util + +Methods in org.apache.hadoop.hbase.util that return TableDescriptorBuilder + +Modifier and Type +Method and Description + + + +static TableDescriptorBuilder +FSTableDescriptors.createMetaTableDescriptorBuilder(org.apache.hadoop.conf.Configurationconf) + + + + +Constructor parameters in org.apache.hadoop.hbase.util with type arguments of type TableDescriptorBuilder + +Constructor and Description + + + +FSTableDescriptors(org.apache.hadoop.conf.Configurationconf, + org.apache.hadoop.fs.FileSystemfs, + org.apache.hadoop.fs.Pathrootdir, + booleanfsreadonly, + booleanusecache, + http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true; title="class or interface in java.util.function">FunctionTableDescriptorBuilder,TableDescriptorBuildermetaObserver) + + +FSTableDescriptors(org.apache.hadoop.conf.Configurationconf, + org.apache.hadoop.fs.FileSystemfs, + org.apache.hadoop.fs.Pathrootdir, + booleanfsreadonly, + booleanusecache, + http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true; title="class or interface in java.util.function">FunctionTableDescriptorBuilder,TableDescriptorBuildermetaObserver) + + + + http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/client/metrics/class-use/ScanMetrics.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/metrics/class-use/ScanMetrics.html b/devapidocs/org/apache/hadoop/hbase/client/metrics/class-use/ScanMetrics.html index 05fcff0..b375dc1 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/metrics/class-use/ScanMetrics.html +++ 
b/devapidocs/org/apache/hadoop/hbase/client/metrics/class-use/ScanMetrics.html @@ -260,10 +260,10 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. intstartLogErrorsCnt) -ClientSideRegionScanner(org.apache.hadoop.conf.Configurationconf, +ClientSideRegionScanner(org.apache.hadoop.conf.Configurationconf, org.apache.hadoop.fs.FileSystemfs, org.apache.hadoop.fs.PathrootDir, - HTableDescriptorhtd, + TableDescriptorhtd, HRegionInfohri, Scanscan, ScanMetricsscanMetrics)
[45/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/apidocs/org/apache/hadoop/hbase/client/class-use/AsyncBufferedMutatorBuilder.html -- diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/AsyncBufferedMutatorBuilder.html b/apidocs/org/apache/hadoop/hbase/client/class-use/AsyncBufferedMutatorBuilder.html new file mode 100644 index 000..a38fe46 --- /dev/null +++ b/apidocs/org/apache/hadoop/hbase/client/class-use/AsyncBufferedMutatorBuilder.html @@ -0,0 +1,223 @@ +http://www.w3.org/TR/html4/loose.dtd;> + + + + + +Uses of Interface org.apache.hadoop.hbase.client.AsyncBufferedMutatorBuilder (Apache HBase 3.0.0-SNAPSHOT API) + + + + + + + +JavaScript is disabled on your browser. + + + + + +Skip navigation links + + + + +Overview +Package +Class +Use +Tree +Deprecated +Index +Help + + + + +Prev +Next + + +Frames +NoFrames + + +AllClasses + + + + + + + + + + +Uses of Interfaceorg.apache.hadoop.hbase.client.AsyncBufferedMutatorBuilder + + + + + +Packages that use AsyncBufferedMutatorBuilder + +Package +Description + + + +org.apache.hadoop.hbase.client + +Provides HBase Client + + + + + + + + + + +Uses of AsyncBufferedMutatorBuilder in org.apache.hadoop.hbase.client + +Methods in org.apache.hadoop.hbase.client that return AsyncBufferedMutatorBuilder + +Modifier and Type +Method and Description + + + +AsyncBufferedMutatorBuilder +AsyncConnection.getBufferedMutatorBuilder(TableNametableName) +Returns an AsyncBufferedMutatorBuilder for creating AsyncBufferedMutator. + + + +AsyncBufferedMutatorBuilder +AsyncConnection.getBufferedMutatorBuilder(TableNametableName, + http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true; title="class or interface in java.util.concurrent">ExecutorServicepool) +Returns an AsyncBufferedMutatorBuilder for creating AsyncBufferedMutator. + + + +AsyncBufferedMutatorBuilder +AsyncBufferedMutatorBuilder.setMaxAttempts(intmaxAttempts) +Set the max attempt times for an operation. 
+ + + +default AsyncBufferedMutatorBuilder +AsyncBufferedMutatorBuilder.setMaxRetries(intmaxRetries) +Set the max retry times for an operation. + + + +AsyncBufferedMutatorBuilder +AsyncBufferedMutatorBuilder.setOperationTimeout(longtimeout, + http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/TimeUnit.html?is-external=true; title="class or interface in java.util.concurrent">TimeUnitunit) +Set timeout for the background flush operation. + + + +AsyncBufferedMutatorBuilder +AsyncBufferedMutatorBuilder.setRetryPause(longpause, + http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/TimeUnit.html?is-external=true; title="class or interface in java.util.concurrent">TimeUnitunit) +Set the base pause time for retrying. + + + +AsyncBufferedMutatorBuilder +AsyncBufferedMutatorBuilder.setRpcTimeout(longtimeout, + http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/TimeUnit.html?is-external=true; title="class or interface in java.util.concurrent">TimeUnitunit) +Set timeout for each rpc request when doing background flush. + + + +AsyncBufferedMutatorBuilder +AsyncBufferedMutatorBuilder.setStartLogErrorsCnt(intstartLogErrorsCnt) +Set the number of retries that are allowed before we start to log. + + + +AsyncBufferedMutatorBuilder +AsyncBufferedMutatorBuilder.setWriteBufferSize(longwriteBufferSize) +Override the write buffer size specified by the provided AsyncConnection's + Configuration instance, via the configuration key + hbase.client.write.buffer. + + + + + + + + + + + + + +Skip navigation links + + + + +Overview +Package +Class +Use +Tree +Deprecated +Index +Help + + + + +Prev +Next + + +Frames +NoFrames + + +AllClasses + + + + + + + + + +Copyright 20072017 https://www.apache.org/;>The Apache Software Foundation. All rights reserved. 
+ + http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/apidocs/org/apache/hadoop/hbase/client/class-use/Mutation.html -- diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/Mutation.html b/apidocs/org/apache/hadoop/hbase/client/class-use/Mutation.html index
[14/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html b/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html index bad64ea..94974ec 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html +++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html @@ -87,58 +87,94 @@ +org.apache.hadoop.hbase.backup.util + + + org.apache.hadoop.hbase.client Provides HBase Client - + org.apache.hadoop.hbase.constraint Restrict the domain of a data attribute, often times to fulfill business rules/requirements. - + org.apache.hadoop.hbase.coprocessor Table of Contents - + org.apache.hadoop.hbase.coprocessor.example - + org.apache.hadoop.hbase.mapreduce Provides HBase http://wiki.apache.org/hadoop/HadoopMapReduce;>MapReduce Input/OutputFormats, a table indexing MapReduce job, and utility methods. - + org.apache.hadoop.hbase.master + +org.apache.hadoop.hbase.master.assignment + + + +org.apache.hadoop.hbase.master.balancer + + + +org.apache.hadoop.hbase.master.procedure + + + +org.apache.hadoop.hbase.master.snapshot + + + +org.apache.hadoop.hbase.mob + + org.apache.hadoop.hbase.regionserver -org.apache.hadoop.hbase.rest.client +org.apache.hadoop.hbase.regionserver.handler -org.apache.hadoop.hbase.rsgroup +org.apache.hadoop.hbase.rest.client -org.apache.hadoop.hbase.security.access +org.apache.hadoop.hbase.rsgroup +org.apache.hadoop.hbase.security.access + + + org.apache.hadoop.hbase.security.visibility + +org.apache.hadoop.hbase.snapshot + + + +org.apache.hadoop.hbase.util + + @@ -166,6 +202,65 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. 
+ +Methods in org.apache.hadoop.hbase that return TableDescriptor + +Modifier and Type +Method and Description + + + +TableDescriptor +TableDescriptors.get(TableNametableName) + + +TableDescriptor +TableDescriptors.remove(TableNametablename) + + + + +Methods in org.apache.hadoop.hbase that return types with arguments of type TableDescriptor + +Modifier and Type +Method and Description + + + +http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,TableDescriptor +TableDescriptors.getAll() +Get Map of all TableDescriptors. + + + +http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,TableDescriptor +TableDescriptors.getAllDescriptors() +Get Map of all TableDescriptors. + + + +http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,TableDescriptor +TableDescriptors.getByNamespace(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringname) +Get Map of all NamespaceDescriptors for a given namespace. + + + + + +Methods in org.apache.hadoop.hbase with parameters of type TableDescriptor + +Modifier and Type +Method and Description + + + +void +TableDescriptors.add(TableDescriptorhtd) +Add or update descriptor + + + + Constructors in org.apache.hadoop.hbase with parameters of type TableDescriptor @@ -180,6 +275,59 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. 
+ + + +Uses of TableDescriptor in org.apache.hadoop.hbase.backup.util + +Methods in org.apache.hadoop.hbase.backup.util that return TableDescriptor + +Modifier and Type +Method and Description + + + +(package private) TableDescriptor +RestoreTool.getTableDesc(TableNametableName) +Get table descriptor + + + +private TableDescriptor +RestoreTool.getTableDescriptor(org.apache.hadoop.fs.FileSystemfileSys, + TableNametableName, + http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringlastIncrBackupId) + + + + +Methods in org.apache.hadoop.hbase.backup.util with parameters of type TableDescriptor + +Modifier and Type +Method and Description + + + +private void +RestoreTool.checkAndCreateTable(Connectionconn, + org.apache.hadoop.fs.PathtableBackupPath, + TableNametableName, + TableNametargetTableName, +
[43/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/apidocs/src-html/org/apache/hadoop/hbase/HTableDescriptor.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/HTableDescriptor.html b/apidocs/src-html/org/apache/hadoop/hbase/HTableDescriptor.html index a4266b6..7e6c558 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/HTableDescriptor.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/HTableDescriptor.html @@ -503,393 +503,390 @@ 495if (this == obj) { 496 return true; 497} -498if (obj == null) { -499 return false; +498if (obj instanceof HTableDescriptor) { +499 return delegatee.equals(((HTableDescriptor) obj).delegatee); 500} -501if (!(obj instanceof HTableDescriptor)) { -502 return false; -503} -504return compareTo((HTableDescriptor)obj) == 0; -505 } -506 -507 /** -508 * @see java.lang.Object#hashCode() -509 */ -510 @Override -511 public int hashCode() { -512return delegatee.hashCode(); -513 } -514 -515 // Comparable -516 -517 /** -518 * Compares the descriptor with another descriptor which is passed as a parameter. -519 * This compares the content of the two descriptors and not the reference. -520 * -521 * @return 0 if the contents of the descriptors are exactly matching, -522 * 1 if there is a mismatch in the contents -523 */ -524 @Override -525 public int compareTo(final HTableDescriptor other) { -526return delegatee.compareTo(other.delegatee); -527 } -528 -529 /** -530 * Returns an unmodifiable collection of all the {@link HColumnDescriptor} -531 * of all the column families of the table. -532 * @deprecated Use {@link #getColumnFamilies}. -533 * @return Immutable collection of {@link HColumnDescriptor} of all the -534 * column families. 
-535 */ -536 @Deprecated -537 public CollectionHColumnDescriptor getFamilies() { -538return Stream.of(delegatee.getColumnFamilies()) -539 .map(this::toHColumnDescriptor) -540 .collect(Collectors.toList()); -541 } -542 -543 /** -544 * Return true if there are at least one cf whose replication scope is serial. -545 */ -546 @Override -547 public boolean hasSerialReplicationScope() { -548return delegatee.hasSerialReplicationScope(); -549 } -550 -551 /** -552 * Returns the configured replicas per region -553 */ -554 @Override -555 public int getRegionReplication() { -556return delegatee.getRegionReplication(); -557 } -558 -559 /** -560 * Sets the number of replicas per region. -561 * @param regionReplication the replication factor per region -562 */ -563 public HTableDescriptor setRegionReplication(int regionReplication) { -564 getDelegateeForModification().setRegionReplication(regionReplication); -565return this; -566 } -567 -568 /** -569 * @return true if the read-replicas memstore replication is enabled. -570 */ -571 @Override -572 public boolean hasRegionMemstoreReplication() { -573return delegatee.hasRegionMemstoreReplication(); -574 } -575 -576 /** -577 * Enable or Disable the memstore replication from the primary region to the replicas. -578 * The replication will be used only for meta operations (e.g. flush, compaction, ...) -579 * -580 * @param memstoreReplication true if the new data written to the primary region -581 * should be replicated. -582 *false if the secondaries can tollerate to have new -583 * data only when the primary flushes the memstore. 
-584 */ -585 public HTableDescriptor setRegionMemstoreReplication(boolean memstoreReplication) { -586 getDelegateeForModification().setRegionMemstoreReplication(memstoreReplication); -587return this; -588 } -589 -590 public HTableDescriptor setPriority(int priority) { -591 getDelegateeForModification().setPriority(priority); -592return this; -593 } -594 -595 @Override -596 public int getPriority() { -597return delegatee.getPriority(); -598 } -599 -600 /** -601 * Returns all the column family names of the current table. The map of -602 * HTableDescriptor contains mapping of family name to HColumnDescriptors. -603 * This returns all the keys of the family map which represents the column -604 * family names of the table. -605 * -606 * @return Immutable sorted set of the keys of the families. -607 * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0 -608 * (a href="https://issues.apache.org/jira/browse/HBASE-18008"HBASE-18008/a;). -609 * Use {@link #getColumnFamilyNames()}. -610 */ -611 @Deprecated -612 public Setbyte[] getFamiliesKeys() { -613return delegatee.getColumnFamilyNames(); -614 } -615 -616 /** -617 * Returns the count of the column families of the table. -618 * -619
[33/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/deprecated-list.html -- diff --git a/devapidocs/deprecated-list.html b/devapidocs/deprecated-list.html index 0e3f0f6..8f1231d 100644 --- a/devapidocs/deprecated-list.html +++ b/devapidocs/deprecated-list.html @@ -1191,518 +1191,515 @@ -org.apache.hadoop.hbase.client.replication.ReplicationAdmin.peerAdded(String) - - org.apache.hadoop.hbase.coprocessor.MasterObserver.postAddColumn(ObserverContextMasterCoprocessorEnvironment, TableName, HColumnDescriptor) As of release 2.0.0, this will be removed in HBase 3.0.0 (https://issues.apache.org/jira/browse/HBASE-13645;>HBASE-13645). Use MasterObserver.postAddColumnFamily(ObserverContext, TableName, ColumnFamilyDescriptor). - + org.apache.hadoop.hbase.coprocessor.MasterObserver.postAddColumnHandler(ObserverContextMasterCoprocessorEnvironment, TableName, HColumnDescriptor) As of release 2.0.0, this will be removed in HBase 3.0.0 (https://issues.apache.org/jira/browse/HBASE-13645;>HBASE-13645). Use MasterObserver.postCompletedAddColumnFamilyAction(ObserverContext, TableName, ColumnFamilyDescriptor). - + org.apache.hadoop.hbase.coprocessor.RegionObserver.postBulkLoadHFile(ObserverContextRegionCoprocessorEnvironment, ListPairbyte[], String, boolean) As of release 2.0.0, this will be removed in HBase 3.0.0 (https://issues.apache.org/jira/browse/HBASE-17123;>HBASE-17123). Use RegionObserver.postBulkLoadHFile(ObserverContext, List, Map, boolean). - + org.apache.hadoop.hbase.coprocessor.RegionObserver.postCompact(ObserverContextRegionCoprocessorEnvironment, Store, StoreFile) Use RegionObserver.postCompact(ObserverContext, Store, StoreFile, CompactionRequest) instead - + org.apache.hadoop.hbase.coprocessor.RegionObserver.postCompactSelection(ObserverContextRegionCoprocessorEnvironment, Store, ImmutableListStoreFile) use RegionObserver.postCompactSelection(ObserverContext, Store, ImmutableList, CompactionRequest) instead. 
- + org.apache.hadoop.hbase.coprocessor.RegionObserver.postCompleteSplit(ObserverContextRegionCoprocessorEnvironment) No longer called in hbase2/AMv2 given the master runs splits now; implement MasterObserver.postCompletedSplitRegionAction(ObserverContext, HRegionInfo, HRegionInfo) instead. - + org.apache.hadoop.hbase.coprocessor.MasterObserver.postCreateTableHandler(ObserverContextMasterCoprocessorEnvironment, HTableDescriptor, HRegionInfo[]) As of release 2.0.0, this will be removed in HBase 3.0.0 (https://issues.apache.org/jira/browse/HBASE-15575;>HBASE-15575). Use MasterObserver.postCompletedCreateTableAction(ObserverContext, TableDescriptor, HRegionInfo[]) - + org.apache.hadoop.hbase.coprocessor.MasterObserver.postDeleteColumn(ObserverContextMasterCoprocessorEnvironment, TableName, byte[]) As of release 2.0.0, this will be removed in HBase 3.0.0 (https://issues.apache.org/jira/browse/HBASE-13645;>HBASE-13645). Use MasterObserver.postDeleteColumnFamily(ObserverContext, TableName, byte[]). - + org.apache.hadoop.hbase.coprocessor.MasterObserver.postDeleteColumnHandler(ObserverContextMasterCoprocessorEnvironment, TableName, byte[]) As of release 2.0.0, this will be removed in HBase 3.0.0 (https://issues.apache.org/jira/browse/HBASE-13645;>HBASE-13645). Use MasterObserver.postCompletedDeleteColumnFamilyAction(ObserverContext, TableName, byte[]). - + org.apache.hadoop.hbase.coprocessor.MasterObserver.postDeleteTableHandler(ObserverContextMasterCoprocessorEnvironment, TableName) As of release 2.0.0, this will be removed in HBase 3.0.0 (https://issues.apache.org/jira/browse/HBASE-15575;>HBASE-15575). Use MasterObserver.postCompletedDeleteTableAction(ObserverContext, TableName). - + org.apache.hadoop.hbase.coprocessor.MasterObserver.postDisableTableHandler(ObserverContextMasterCoprocessorEnvironment, TableName) As of release 2.0.0, this will be removed in HBase 3.0.0 (https://issues.apache.org/jira/browse/HBASE-15575;>HBASE-15575). 
Use MasterObserver.postCompletedDisableTableAction(ObserverContext, TableName). - + org.apache.hadoop.hbase.coprocessor.MasterObserver.postDispatchMerge(ObserverContextMasterCoprocessorEnvironment, HRegionInfo, HRegionInfo) As of release 2.0.0, this will be removed in HBase 3.0.0 (https://issues.apache.org/jira/browse/HBASE-;>HBASE-). Use MasterObserver.postMergeRegions(ObserverContext, HRegionInfo[]) - + org.apache.hadoop.hbase.coprocessor.MasterObserver.postEnableTableHandler(ObserverContextMasterCoprocessorEnvironment, TableName) As of release 2.0.0, this will be removed in HBase 3.0.0 (https://issues.apache.org/jira/browse/HBASE-15575;>HBASE-15575). Use
[18/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.ModifyableTableDescriptor.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.ModifyableTableDescriptor.html b/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.ModifyableTableDescriptor.html index 05a7362..a1ce360 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.ModifyableTableDescriptor.html +++ b/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.ModifyableTableDescriptor.html @@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Private -public static class TableDescriptorBuilder.ModifyableTableDescriptor +public static class TableDescriptorBuilder.ModifyableTableDescriptor extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object implements TableDescriptor, http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true; title="class or interface in java.lang">ComparableTableDescriptorBuilder.ModifyableTableDescriptor TODO: make this private after removing the HTableDescriptor @@ -672,7 +672,7 @@ implements name -private finalTableName name +private finalTableName name @@ -681,7 +681,7 @@ implements values -private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">MapBytes,Bytes values +private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">MapBytes,Bytes values A map which holds the metadata information of the table. This metadata includes values like IS_META, SPLIT_POLICY, MAX_FILE_SIZE, READONLY, MEMSTORE_FLUSHSIZE etc... 
@@ -693,7 +693,7 @@ implements configuration -private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String configuration +private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String configuration A map which holds the configuration specific to the table. The keys of the map have the same names as config keys and override the defaults with table-specific settings. Example usage may be for compactions, etc. 
@@ -705,7 +705,7 @@ implements families -private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Mapbyte[],ColumnFamilyDescriptor families +private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Mapbyte[],ColumnFamilyDescriptor families Maps column family name to the respective FamilyDescriptors @@ -724,7 +724,7 @@ implements ModifyableTableDescriptor @InterfaceAudience.Private -publicModifyableTableDescriptor(TableNamename) +publicModifyableTableDescriptor(TableNamename) Construct a table descriptor specifying a TableName object Parameters: @@ -739,7 +739,7 @@ public ModifyableTableDescriptor -privateModifyableTableDescriptor(TableDescriptordesc) +privateModifyableTableDescriptor(TableDescriptordesc) @@ -750,7 +750,7 @@ public@InterfaceAudience.Private http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true; title="class or interface in java.lang">@Deprecated -publicModifyableTableDescriptor(TableNamename, +publicModifyableTableDescriptor(TableNamename, TableDescriptordesc) Deprecated. Construct a table descriptor by cloning the descriptor passed as a @@ -771,7 +771,7 @@ public ModifyableTableDescriptor -privateModifyableTableDescriptor(TableNamename, +privateModifyableTableDescriptor(TableNamename, http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true; title="class or interface in java.util">CollectionColumnFamilyDescriptorfamilies, http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">MapBytes,Bytesvalues,
[49/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html -- diff --git a/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html b/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html index 8bc8e3a..f6c2d8b 100644 --- a/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html +++ b/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html @@ -1219,7 +1219,7 @@ implements NAMESPACE_FAMILY_INFO -public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String NAMESPACE_FAMILY_INFO +public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String NAMESPACE_FAMILY_INFO Deprecated. See Also: @@ -1233,7 +1233,7 @@ implements NAMESPACE_FAMILY_INFO_BYTES -public static finalbyte[] NAMESPACE_FAMILY_INFO_BYTES +public static finalbyte[] NAMESPACE_FAMILY_INFO_BYTES Deprecated. @@ -1243,7 +1243,7 @@ implements NAMESPACE_COL_DESC_BYTES -public static finalbyte[] NAMESPACE_COL_DESC_BYTES +public static finalbyte[] NAMESPACE_COL_DESC_BYTES Deprecated. @@ -1253,7 +1253,7 @@ implements NAMESPACE_TABLEDESC -public static finalHTableDescriptor NAMESPACE_TABLEDESC +public static finalHTableDescriptor NAMESPACE_TABLEDESC Deprecated. Table descriptor for namespace table @@ -1967,7 +1967,7 @@ implements hashCode -publicinthashCode() +publicinthashCode() Deprecated. Overrides: @@ -1983,7 +1983,7 @@ implements compareTo -publicintcompareTo(HTableDescriptorother) +publicintcompareTo(HTableDescriptorother) Deprecated. Compares the descriptor with another descriptor which is passed as a parameter. This compares the content of the two descriptors and not the reference. 
@@ -2003,7 +2003,7 @@ implements getFamilies http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true; title="class or interface in java.lang">@Deprecated -publichttp://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true; title="class or interface in java.util">CollectionHColumnDescriptorgetFamilies() +publichttp://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true; title="class or interface in java.util">CollectionHColumnDescriptorgetFamilies() Deprecated.Use getColumnFamilies(). Returns an unmodifiable collection of all the HColumnDescriptor of all the column families of the table. @@ -2020,7 +2020,7 @@ publichttp://docs.oracle.com/javase/8/docs/api/java/util/Collecti hasSerialReplicationScope -publicbooleanhasSerialReplicationScope() +publicbooleanhasSerialReplicationScope() Deprecated. Return true if there are at least one cf whose replication scope is serial. @@ -2038,7 +2038,7 @@ publichttp://docs.oracle.com/javase/8/docs/api/java/util/Collecti getRegionReplication -publicintgetRegionReplication() +publicintgetRegionReplication() Deprecated. Returns the configured replicas per region @@ -2055,7 +2055,7 @@ publichttp://docs.oracle.com/javase/8/docs/api/java/util/Collecti setRegionReplication -publicHTableDescriptorsetRegionReplication(intregionReplication) +publicHTableDescriptorsetRegionReplication(intregionReplication) Deprecated. Sets the number of replicas per region. @@ -2070,7 +2070,7 @@ publichttp://docs.oracle.com/javase/8/docs/api/java/util/Collecti hasRegionMemstoreReplication -publicbooleanhasRegionMemstoreReplication() +publicbooleanhasRegionMemstoreReplication() Deprecated. Specified by: @@ -2086,7 +2086,7 @@ publichttp://docs.oracle.com/javase/8/docs/api/java/util/Collecti setRegionMemstoreReplication -publicHTableDescriptorsetRegionMemstoreReplication(booleanmemstoreReplication) +publicHTableDescriptorsetRegionMemstoreReplication(booleanmemstoreReplication) Deprecated. 
Enable or Disable the memstore replication from the primary region to the replicas. The replication will be used only for meta operations (e.g. flush, compaction, ...) @@ -2105,7 +2105,7 @@ publichttp://docs.oracle.com/javase/8/docs/api/java/util/Collecti setPriority -publicHTableDescriptorsetPriority(intpriority) +publicHTableDescriptorsetPriority(intpriority) Deprecated. @@ -2115,7 +2115,7 @@ publichttp://docs.oracle.com/javase/8/docs/api/java/util/Collecti getPriority -publicintgetPriority() +publicintgetPriority() Deprecated. Specified by: @@ -2130,7 +2130,7 @@ publichttp://docs.oracle.com/javase/8/docs/api/java/util/Collecti getFamiliesKeys http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true; title="class or interface in java.lang">@Deprecated -publichttp://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true; title="class or interface in java.util">Setbyte[]getFamiliesKeys()
[30/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/TableDescriptors.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/TableDescriptors.html b/devapidocs/org/apache/hadoop/hbase/TableDescriptors.html index de8384d..a393931 100644 --- a/devapidocs/org/apache/hadoop/hbase/TableDescriptors.html +++ b/devapidocs/org/apache/hadoop/hbase/TableDescriptors.html @@ -106,7 +106,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Private -public interface TableDescriptors +public interface TableDescriptors Get, remove and modify table descriptors. Used by servers to host descriptors. @@ -129,34 +129,34 @@ public interface void -add(HTableDescriptorhtd) +add(TableDescriptorhtd) Add or update descriptor -HTableDescriptor +TableDescriptor get(TableNametableName) -http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,HTableDescriptor +http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,TableDescriptor getAll() -Get Map of all HTableDescriptors. +Get Map of all TableDescriptors. 
-http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,HTableDescriptor +http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,TableDescriptor getAllDescriptors() Get Map of all TableDescriptors. -http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,HTableDescriptor +http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,TableDescriptor getByNamespace(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringname) Get Map of all NamespaceDescriptors for a given namespace. 
-HTableDescriptor +TableDescriptor remove(TableNametablename) @@ -192,8 +192,8 @@ public interface get -HTableDescriptorget(TableNametableName) - throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException +TableDescriptorget(TableNametableName) + throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException Parameters: tableName - @@ -210,8 +210,8 @@ public interface getByNamespace -http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,HTableDescriptorgetByNamespace(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringname) - throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException +http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,TableDescriptorgetByNamespace(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringname) +throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException Get Map of all NamespaceDescriptors for a given namespace. Returns: @@ -227,9 +227,9 @@ public interface getAll -http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
[04/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html index 165301e..cd5c15e 100644 --- a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html +++ b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html @@ -248,14 +248,14 @@ extends Methods inherited from classorg.apache.hadoop.hbase.master.HMaster -abort, abortProcedure, addColumn, addReplicationPeer, balance, balance, balanceSwitch, canCreateBaseZNode, canUpdateTableDescriptor, checkIfShouldMoveSystemRegionAsync, checkInitialized, checkServiceStarted, checkTableModifiable, configureInfoServer, construct Master, createMetaBootstrap, createNamespace, createQuotaSnapshotNotifier, createRpcServices, createServerManager, createSystemTable, createTable, deleteColumn, deleteNamespace, deleteTable, disableReplicationPeer, disableTable, drainRegionServer, enableReplicationPeer, enableTable, getAssignmentManager, getAverageLoad, getCatalogJanitor, getClientIdAuditPrefix, getClusterSchema, getClusterStatus, getClusterStatus, getDumpServlet, getFavoredNodesManager, getFsTableDescriptors, getHFileCleaner, getInitializedEvent, getLastMajorCompactionTimestamp, getLastMajorCompactionTimestampForRegion, getLoadBalancer, getLoadBalancerClassName, getLoadedCoprocessors, getLockManager, getLogCleaner, getMasterActiveTime, getMasterCoprocessorHost, getMasterCoprocessors, getMasterFileSystem, getMasterFinishedInitializationTime, getMasterMetrics, getMasterProcedureExecutor, getMasterProcedureManagerHost, getMasterQuotaManager, getMasterRpcServices, getMasterStartTime, ge tMasterWalManager, getMergePlanCount, getMobCompactionState, getNamespace, getNamespaces, getNumWALFiles, getProcessName, getQuotaObserverChore, 
getRegionNormalizer, getRegionNormalizerTracker, getRegionServerFatalLogBuffer, getRegionServerInfoPort, getRegionServerVersion, getRemoteInetAddress, getReplicationPeerConfig, getServerCrashProcessingEnabledEvent, getServerManager, getServerName, getSnapshotManager, getSpaceQuotaSnapshotNotifier, getSplitOrMergeTracker, getSplitPlanCount, getTableDescriptors, getTableRegionForRow, getTableStateManager, getWalProcedureStore, getZooKeeper, initClusterSchemaService, initializeZKBasedSystemTrackers, initQuotaManager, isActiveMaster, isBalancerOn, isCatalogJanitorEnabled, isCleanerChoreEnabled, isInitialized, isInMaintenanceMode, isNormalizerOn, isServerCrashProcessingEnabled, isSplitOrMergeEnabled, listDrainingRegionServers, listLocks, listProcedures, listReplicationPeers, listTableDescriptors, listTableDescriptorsByNamespace, listTableNames, listTableNamesByNamespace, login, main, mergeRegions, modifyColumn, modifyNamespace, modifyTable, move, normalizeRegions, recoverMeta, registerService, removeDrainFromRegionServer, removeReplicationPeer, reportMobCompactionEnd, reportMobCompactionStart, requestMobCompa ction, restoreSnapshot, setCatalogJanitorEnabled, setInitialized, setServerCrashProcessingEnabled, shutdown, splitRegion, stopMaster, stopServiceThreads, truncateTable, updateConfigurationForSpaceQuotaObserver, updateReplicationPeerConfig, waitForMasterActive +abort, abortProcedure, addColumn, addReplicationPeer, balance, balance, balanceSwitch, canCreateBaseZNode, canUpdateTableDescriptor, checkIfShouldMoveSystemRegionAsync, checkInitialized, checkServiceStarted, checkTableModifiable, configureInfoServer, constructMaster, createMetaBootstrap, createNamespace, createQuotaSnapshotNotifier, createRpcServices, createServerManager, createSystemTable, createTable, deleteColumn, deleteNamespace, deleteTable, disableReplicationPeer, disableTable, drainRegionServer, enableReplicationPeer, enableTable, getAssignmentManager, getAverageLoad, getCatalogJanitor, 
getClientIdAuditPrefix, getClusterSchema, getClusterStatus, getClusterStatus, getDumpServlet, getFavoredNodesManager, getHFileCleaner, getInitializedEvent, getLastMajorCompactionTimestamp, getLastMajorCompactionTimestampForRegion, getLoadBalancer, getLoadBalancerClassName, getLoadedCoprocessors, getLockManager, getLogCleaner, getMasterActiveTime, getMasterCoprocessorHost, getMasterCoprocessors, getMasterFileSystem, getMasterFinishedInitializationTime, getMasterMetrics, getMasterProcedureExecutor, getMasterProcedureManagerHost, getMasterQuotaManager, getMasterRpcServices, getMasterStartTime, getMasterWalManager, getMergePlanCount, getMetaTableObserver, getMobCompactionState, getNamespace, getNamespaces, getNumWALFiles, getProcessName,
[12/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html b/devapidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html index afdff4c..8344cf4 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html +++ b/devapidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html @@ -18,7 +18,7 @@ catch(err) { } //--> -var methods = {"i0":42,"i1":42,"i2":42,"i3":42,"i4":42,"i5":42,"i6":42,"i7":42,"i8":42,"i9":42,"i10":42,"i11":42,"i12":42,"i13":42,"i14":42,"i15":42,"i16":42,"i17":42,"i18":41,"i19":42,"i20":42,"i21":42,"i22":42,"i23":42,"i24":42}; +var methods = {"i0":42,"i1":42,"i2":42,"i3":42,"i4":42,"i5":42,"i6":42,"i7":42,"i8":42,"i9":42,"i10":42,"i11":42,"i12":42,"i13":42,"i14":42,"i15":42,"i16":42,"i17":41,"i18":42,"i19":42,"i20":42,"i21":42,"i22":42}; var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]}; var altColor = "altColor"; var rowColor = "rowColor"; @@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Public http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true; title="class or interface in java.lang">@Deprecated -public class ReplicationAdmin +public class ReplicationAdmin extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true; title="class or interface in java.io">Closeable @@ -189,18 +189,6 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h -private ReplicationPeers -replicationPeers -Deprecated. - - - -private ReplicationQueuesClient -replicationQueuesClient -Deprecated. 
- - - static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String REPLICATIONSERIAL Deprecated. @@ -218,13 +206,6 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h Deprecated. - -private ZooKeeperWatcher -zkw -Deprecated. -A watcher used by replicationPeers and replicationQueuesClient. - - @@ -315,12 +296,6 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h -private ZooKeeperWatcher -createZooKeeperWatcher() -Deprecated. - - - void disablePeer(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringid) Deprecated. @@ -329,7 +304,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h - + void disableTableRep(TableNametableName) Deprecated. @@ -338,7 +313,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h - + void enablePeer(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringid) Deprecated. @@ -347,7 +322,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h - + void enableTableRep(TableNametableName) Deprecated. @@ -356,7 +331,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h - + ReplicationPeerConfig getPeerConfig(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringid) Deprecated. @@ -365,19 +340,19 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h - + int getPeersCount() Deprecated. - + boolean getPeerState(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringid) Deprecated. 
- + http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String getPeerTableCFs(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringid) Deprecated. @@ -386,7 +361,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h - + http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,ReplicationPeerConfig listPeerConfigs() Deprecated. @@ -394,7 +369,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h - + http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in
[02/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html index e93b0e3..bd1e0fe 100644 --- a/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html +++ b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html @@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Private -public class MasterRpcServices +public class MasterRpcServices extends RSRpcServices implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService.BlockingInterface, org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService.BlockingInterface, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockService.BlockingInterface Implements the master RPC services. @@ -736,7 +736,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master LOG -private static finalorg.apache.commons.logging.Log LOG +private static finalorg.apache.commons.logging.Log LOG @@ -745,7 +745,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master master -private finalHMaster master +private finalHMaster master @@ -762,7 +762,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master MasterRpcServices -publicMasterRpcServices(HMasterm) +publicMasterRpcServices(HMasterm) throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException Throws: @@ -784,7 +784,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master createConfigurationSubset -privateorg.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.BuildercreateConfigurationSubset() 
+privateorg.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.BuildercreateConfigurationSubset() Returns: Subset of configuration to pass initializing regionservers: e.g. @@ -798,7 +798,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master addConfig -privateorg.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.BuilderaddConfig(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.Builderresp, +privateorg.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.BuilderaddConfig(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.Builderresp, http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringkey) @@ -808,7 +808,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master createPriority -protectedPriorityFunctioncreatePriority() +protectedPriorityFunctioncreatePriority() Overrides: createPriorityin classRSRpcServices @@ -821,7 +821,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master switchBalancer -booleanswitchBalancer(booleanb, +booleanswitchBalancer(booleanb, MasterRpcServices.BalanceSwitchModemode) throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException Assigns balancer switch according to BalanceSwitchMode @@ -842,7 +842,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master synchronousBalanceSwitch -booleansynchronousBalanceSwitch(booleanb) +booleansynchronousBalanceSwitch(booleanb) throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException Throws: @@ -856,7 +856,7 @@ 
implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master normalizerSwitch -publicbooleannormalizerSwitch(booleanon) +publicbooleannormalizerSwitch(booleanon) Sets normalizer on/off flag in ZK. @@ -866,7 +866,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master getServices -protectedhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListRpcServer.BlockingServiceAndInterfacegetServices() +protectedhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or
[48/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/apidocs/org/apache/hadoop/hbase/class-use/TableName.html -- diff --git a/apidocs/org/apache/hadoop/hbase/class-use/TableName.html b/apidocs/org/apache/hadoop/hbase/class-use/TableName.html index 4fed5d0..4a40934 100644 --- a/apidocs/org/apache/hadoop/hbase/class-use/TableName.html +++ b/apidocs/org/apache/hadoop/hbase/class-use/TableName.html @@ -409,63 +409,70 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. TableName +AsyncBufferedMutator.getName() +Gets the fully qualified table name instance of the table that this + AsyncBufferedMutator writes to. + + + +TableName AsyncTableRegionLocator.getName() Gets the fully qualified table name instance of the table whose region we want to locate. - + TableName Table.getName() Gets the fully qualified table name instance of this table. - + TableName SnapshotDescription.getTableName() - + TableName TableDescriptor.getTableName() Get the name of the table - + TableName BufferedMutatorParams.getTableName() - + TableName[] Admin.listTableNames() List all of the names of userspace tables. - + TableName[] Admin.listTableNames(http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true; title="class or interface in java.util.regex">Patternpattern) List all of the names of userspace tables. - + TableName[] Admin.listTableNames(http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true; title="class or interface in java.util.regex">Patternpattern, booleanincludeSysTables) List all of the names of userspace tables. - + TableName[] Admin.listTableNames(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringregex) List all of the names of userspace tables. 
- + TableName[] Admin.listTableNames(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringregex, booleanincludeSysTables) List all of the names of userspace tables. - + TableName[] Admin.listTableNamesByNamespace(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringname) Get list of table names by namespace @@ -614,6 +621,11 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. +static TableDescriptor +TableDescriptorBuilder.copy(TableNamename, +TableDescriptordesc) + + void Admin.deleteColumn(TableNametableName, byte[]columnFamily) @@ -625,152 +637,178 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. - + http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true; title="class or interface in java.lang">Void AsyncAdmin.deleteColumnFamily(TableNametableName, byte[]columnFamily) Delete a column family from a table. - + void Admin.deleteColumnFamily(TableNametableName, byte[]columnFamily) Delete a column family from a table. - + http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true; title="class or interface in java.util.concurrent">Futurehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true; title="class or interface in java.lang">Void Admin.deleteColumnFamilyAsync(TableNametableName, byte[]columnFamily) Delete a column family from a table. 
- + http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true; title="class or interface in java.lang">Void AsyncAdmin.deleteTable(TableNametableName) Deletes a table. - + void Admin.deleteTable(TableNametableName) Deletes a table. - + http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true; title="class or interface in java.util.concurrent">Futurehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true; title="class or interface in java.lang">Void Admin.deleteTableAsync(TableNametableName) Deletes the table but does not block and wait for it be completely removed. - + http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true; title="class or interface in java.lang">Void
[17/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html b/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html index 2e92f44..1455aba 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html +++ b/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html @@ -18,7 +18,7 @@ catch(err) { } //--> -var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":9,"i6":10,"i7":9,"i8":9,"i9":9,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":42,"i23":42,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":9}; +var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":9,"i6":9,"i7":10,"i8":9,"i9":9,"i10":9,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":42,"i24":42,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":9}; var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]}; var altColor = "altColor"; var rowColor = "rowColor"; @@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Public -public class TableDescriptorBuilder +public class TableDescriptorBuilder extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object @@ -429,117 +429,122 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? 
copy(TableDescriptordesc) +static TableDescriptor +copy(TableNamename, +TableDescriptordesc) + + TableDescriptorBuilder modifyColumnFamily(ColumnFamilyDescriptorfamily) - + static TableDescriptorBuilder newBuilder(TableDescriptordesc) Copy all configuration, values, families, and name from the input. - + static TableDescriptorBuilder newBuilder(TableNamename) - + static TableDescriptor parseFrom(byte[]pbBytes) The input should be created by toByteArray(org.apache.hadoop.hbase.client.TableDescriptor). - + TableDescriptorBuilder removeColumnFamily(byte[]name) - + TableDescriptorBuilder removeConfiguration(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringkey) - + TableDescriptorBuilder removeCoprocessor(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringclassName) - + TableDescriptorBuilder removeValue(byte[]key) - + TableDescriptorBuilder removeValue(Byteskey) - + TableDescriptorBuilder setCompactionEnabled(booleanisEnable) - + TableDescriptorBuilder setConfiguration(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringkey, http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringvalue) - + TableDescriptorBuilder setDurability(Durabilitydurability) - + TableDescriptorBuilder setFlushPolicyClassName(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringclazz) - + TableDescriptorBuilder setMaxFileSize(longmaxFileSize) - + TableDescriptorBuilder setMemStoreFlushSize(longmemstoreFlushSize) - + TableDescriptorBuilder setNormalizationEnabled(booleanisEnable) - + TableDescriptorBuilder setOwner(Userowner) Deprecated. 
- + TableDescriptorBuilder setOwnerString(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringownerString) Deprecated. - + TableDescriptorBuilder setPriority(intpriority) - + TableDescriptorBuilder setReadOnly(booleanreadOnly) - + TableDescriptorBuilder setRegionMemstoreReplication(booleanmemstoreReplication) - + TableDescriptorBuilder setRegionReplication(intregionReplication) - + TableDescriptorBuilder setRegionSplitPolicyClassName(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringclazz) - + TableDescriptorBuilder setValue(byte[]key, byte[]value) - + TableDescriptorBuilder setValue(Byteskey, Bytesvalue) - + static byte[] toByteArray(TableDescriptordesc) @@ -571,7 +576,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? LOG -public static finalorg.apache.commons.logging.Log LOG +public static finalorg.apache.commons.logging.Log LOG @@ -581,7 +586,7 @@ extends
[06/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html index 3454f10..2c49400 100644 --- a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html +++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html @@ -273,12 +273,12 @@ java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true; title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true; title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true; title="class or interface in java.io">Serializable) -org.apache.hadoop.hbase.io.hfile.BlockType +org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType org.apache.hadoop.hbase.io.hfile.HFileBlock.Writer.State org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory -org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType org.apache.hadoop.hbase.io.hfile.CacheConfig.ExternalBlockCaches org.apache.hadoop.hbase.io.hfile.BlockPriority +org.apache.hadoop.hbase.io.hfile.BlockType http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html index c426e00..e6dd2ac 100644 --- a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html +++ b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html @@ -341,9 +341,9 @@ java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true; title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true; 
title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true; title="class or interface in java.io">Serializable) -org.apache.hadoop.hbase.ipc.BufferCallBeforeInitHandler.BufferCallAction org.apache.hadoop.hbase.ipc.MetricsHBaseServerSourceFactoryImpl.SourceStorage org.apache.hadoop.hbase.ipc.CallEvent.Type +org.apache.hadoop.hbase.ipc.BufferCallBeforeInitHandler.BufferCallAction http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.InputSplit.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.InputSplit.html b/devapidocs/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.InputSplit.html index 75fb5f2..d668a1f 100644 --- a/devapidocs/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.InputSplit.html +++ b/devapidocs/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.InputSplit.html @@ -140,7 +140,7 @@ implements org.apache.hadoop.io.Writable Field and Description -private HTableDescriptor +private TableDescriptor htd @@ -177,7 +177,7 @@ implements org.apache.hadoop.io.Writable InputSplit() -InputSplit(HTableDescriptorhtd, +InputSplit(TableDescriptorhtd, HRegionInforegionInfo, http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringlocations, Scanscan, @@ -199,7 +199,7 @@ implements org.apache.hadoop.io.Writable Method and Description -HTableDescriptor +TableDescriptor getHtd() @@ -223,7 +223,7 @@ implements org.apache.hadoop.io.Writable getScan() -HTableDescriptor +TableDescriptor getTableDescriptor() @@ -262,7 +262,7 @@ implements org.apache.hadoop.io.Writable htd -privateHTableDescriptor htd +privateTableDescriptor htd @@ -318,13 
+318,13 @@ implements org.apache.hadoop.io.Writable publicInputSplit() - + InputSplit -publicInputSplit(HTableDescriptorhtd, +publicInputSplit(TableDescriptorhtd, HRegionInforegionInfo, http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringlocations, Scanscan, @@ -345,7 +345,7 @@ implements org.apache.hadoop.io.Writable getHtd -publicHTableDescriptorgetHtd() +publicTableDescriptorgetHtd() @@
[35/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/dependency-convergence.html -- diff --git a/dependency-convergence.html b/dependency-convergence.html index 6b5261a..914c91d 100644 --- a/dependency-convergence.html +++ b/dependency-convergence.html @@ -7,7 +7,7 @@ - + Apache HBase Reactor Dependency Convergence @@ -287,13 +287,13 @@ Number of modules: -37 +39 Number of dependencies (NOD): -304 +306 Number of unique artifacts (NOA): -329 +331 Number of version-conflicting artifacts (NOC): 16 @@ -516,24 +516,26 @@ + 3.4.9 -org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT+-org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile|+-org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile||+-(org.apache.zookeeper:zookeeper:jar:3.4.9:compile - version managed from 3.4.6; omitted for duplicate)||\-org.apache.curator:curator-client:jar:2.12.0:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.9:compile - version managed from 3.4.8; omitted for duplicate)|+-org.apache.zookeeper:zookeeper:jar:3.4.9:compile|+-org.apache.hadoop:hadoop-common:jar:2.7.1:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.9:compile - version managed from 3.4.6; omitted for duplicate)|+-org.apache.hadoop:hadoop-auth:jar:2.7.1:compile|| 60;\-(org.apache.zookeeper:zookeeper:jar:3.4.9:compile - version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-client:jar:2.7.1:compile|\-org.apache.hadoop:hadoop-mapreduce-client-app:jar:2.7.1:compile|\-org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.7.1:compile|\-org.apache.hadoop:hadoop-yarn-server-common:jar:2.7.1:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.9:compile - version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test|+-org.apache.hbase:hbase-server:test-jar:tests:3.0.0-SNAPSHOT:test||\-(org.apache.zookeeper:zo okeeper:jar:3.4.9:test - version managed from 3.4.6; omitted for 
duplicate)|\-org.apache.hadoop:hadoop-minicluster:jar:2.7.1:test|+-org.apache.hadoop:hadoop-common:test-jar:tests:2.7.1:test||\-(org.apache.zookeeper:zookeeper:jar:3.4.9:test - version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.7.1:test|\-org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.7.1:test|\-(org.apache.zookeeper:zookeeper:jar:3.4.9:test - version managed from 3.4.6; omitted for duplicate)\-org.apache.hbase:hbase-rsgroup:jar:3.0.0-SNAPSHOT:compile\-(org.apache.zookeeper:zookeeper:jar:3.4.9:compile - version managed from 3.4.6; omitted for duplicate) -org.apache.hbase:hbase-client-project:jar:3.0.0-SNAPSHOT+-org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test|+-org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:test||\-(org.apache.zookeeper:zookeeper:jar:3.4.9:runtime - version managed from 3.4.6; scope updated from test; omitted for duplicate)|+-org.apache.hbase:hbase-server:test-jar:tests:3.0.0-SNAPSHOT:test||\-(org.apache.zookeeper:zookeeper:jar:3.4.9:runtime - version managed from 3.4.6; scope updated from test; omitted for duplicate)|+-org.apache.hadoop:hadoop-mapreduce-client-jobclient:jar:2.7.1:test||\-org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.7.1:test||\-org.apache.hadoop:hadoop-yarn-server-common:jar:2.7.1:test|| 60;\-(org.apache.zookeeper:zookeeper:jar:3.4.9:test - version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-minicluster:jar:2.7.1:test|+-org.apache.hadoop:hadoop-common:test-jar:tests:2.7.1:test||\-(org.apache.zookeeper:zookeeper:jar:3.4.9:runtime - version managed from 3.4.6; scope updated from test; omitted for duplicate)|\-org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.7.1:test|\-org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.7.1:test|\-(org.apache.zookeeper:zookeeper:jar:3.4.9:test - version managed from 3.4.6; omitted for duplicate)\-org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile 
;+-org.apache.zookeeper:zookeeper:jar:3.4.9:compile+-org.apache.curator:curator-client:jar:2.12.0:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.9:compile - version managed from 3.4.8; omitted for duplicate)+-org.apache.hadoop:hadoop-auth:jar:2.7.1:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.9:compile - version managed from 3.4.6; omitted for duplicate)\-org.apache.hadoop:hadoop-common:jar:2.7.1:compile\-(org.apache.zookeeper:zookeeper:jar:3.4.9:compile - version managed from 3.4.6; omitted for duplicate) -org.apache.hbase:hbase-endpoint:jar:3.0.0-SNAPSHOT+-org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile|+-org.apache.zookeeper:zookeeper:jar:3.4.9:compile|\-org.apache.curator:curator-client:jar:2.12.0:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.9:compile - version managed from 3.4.8; omitted for
[09/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.html index 5fca8ad..39e3ac8 100644 --- a/devapidocs/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.html +++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.html @@ -18,7 +18,7 @@ catch(err) { } //--> -var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10}; +var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10}; var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]}; var altColor = "altColor"; var rowColor = "rowColor"; @@ -259,35 +259,47 @@ implements long +getCurrentDataSize() +Returns the occupied size of data blocks, in bytes. + + + +long getCurrentSize() Returns the occupied size of the block cache, in bytes. - + +long +getDataBlockCount() +Returns the number of data blocks currently cached in the block cache. + + + long getFreeSize() Returns the free size of the block cache, in bytes. - + int getRefCount(BlockCacheKeycacheKey) - + CacheStats getStats() Get the statistics for this block cache. - + long heapSize() - + http://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true; title="class or interface in java.util">IteratorCachedBlock iterator() - + void returnBlock(BlockCacheKeycacheKey, Cacheableblock) @@ -295,19 +307,19 @@ implements + void setMaxSize(longsize) Sets the max heap size that can be used by the BlockCache. - + void shutdown() Shutdown the cache. - + long size() Returns the total size of the block cache, in bytes. 
@@ -566,13 +578,30 @@ implements + + + + +getCurrentDataSize +publiclonggetCurrentDataSize() +Description copied from interface:BlockCache +Returns the occupied size of data blocks, in bytes. + +Specified by: +getCurrentDataSizein interfaceBlockCache +Returns: +occupied space in cache, in bytes + + + getFreeSize -publiclonggetFreeSize() +publiclonggetFreeSize() Description copied from interface:BlockCache Returns the free size of the block cache, in bytes. @@ -589,7 +618,7 @@ implements getCurrentSize -publiclonggetCurrentSize() +publiclonggetCurrentSize() Description copied from interface:BlockCache Returns the occupied size of the block cache, in bytes. @@ -606,7 +635,7 @@ implements getBlockCount -publiclonggetBlockCount() +publiclonggetBlockCount() Description copied from interface:BlockCache Returns the number of blocks currently cached in the block cache. @@ -617,13 +646,30 @@ implements + + + + +getDataBlockCount +publiclonggetDataBlockCount() +Description copied from interface:BlockCache +Returns the number of data blocks currently cached in the block cache. 
+ +Specified by: +getDataBlockCountin interfaceBlockCache +Returns: +number of blocks in the cache + + + iterator -publichttp://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true; title="class or interface in java.util">IteratorCachedBlockiterator() +publichttp://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true; title="class or interface in java.util">IteratorCachedBlockiterator() Specified by: http://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true#iterator--; title="class or interface in java.lang">iteratorin interfacehttp://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true; title="class or interface in java.lang">IterableCachedBlock @@ -640,7 +686,7 @@ implements getBlockCaches -publicBlockCache[]getBlockCaches() +publicBlockCache[]getBlockCaches() Specified by: getBlockCachesin interfaceBlockCache @@ -655,7 +701,7 @@ implements setMaxSize -publicvoidsetMaxSize(longsize) +publicvoidsetMaxSize(longsize) Description copied from interface:ResizableBlockCache Sets the max heap size that can be used by the BlockCache. @@ -672,7 +718,7 @@ implements returnBlock -publicvoidreturnBlock(BlockCacheKeycacheKey, +publicvoidreturnBlock(BlockCacheKeycacheKey, Cacheableblock) Description copied from interface:BlockCache Called when the scanner using the block decides to return the block once its usage @@ -696,7 +742,7 @@ implements getRefCount -publicintgetRefCount(BlockCacheKeycacheKey) +publicintgetRefCount(BlockCacheKeycacheKey)
[34/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/dependency-info.html -- diff --git a/dependency-info.html b/dependency-info.html index 1cfa712..a3ffd82 100644 --- a/dependency-info.html +++ b/dependency-info.html @@ -7,7 +7,7 @@ - + Apache HBase Dependency Information @@ -318,7 +318,7 @@ https://www.apache.org/;>The Apache Software Foundation. All rights reserved. - Last Published: 2017-08-23 + Last Published: 2017-08-24 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/dependency-management.html -- diff --git a/dependency-management.html b/dependency-management.html index cb0b399..325768d 100644 --- a/dependency-management.html +++ b/dependency-management.html @@ -7,7 +7,7 @@ - + Apache HBase Project Dependency Management @@ -558,215 +558,221 @@ https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 2.0 org.apache.hbase -http://hbase.apache.org/hbase-build-configuration/hbase-rsgroup;>hbase-rsgroup +http://hbase.apache.org/hbase-build-configuration/hbase-replication;>hbase-replication 3.0.0-SNAPSHOT jar https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 2.0 org.apache.hbase -http://hbase.apache.org/hbase-build-configuration/hbase-server;>hbase-server +http://hbase.apache.org/hbase-build-configuration/hbase-rsgroup;>hbase-rsgroup 3.0.0-SNAPSHOT jar https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 2.0 org.apache.hbase -http://hbase.apache.org/hbase-build-configuration/hbase-shell;>hbase-shell +http://hbase.apache.org/hbase-build-configuration/hbase-server;>hbase-server 3.0.0-SNAPSHOT jar https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 2.0 org.apache.hbase -http://hbase.apache.org/hbase-build-configuration/hbase-thrift;>hbase-thrift +http://hbase.apache.org/hbase-build-configuration/hbase-shell;>hbase-shell 3.0.0-SNAPSHOT jar https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 2.0 +org.apache.hbase 
+http://hbase.apache.org/hbase-build-configuration/hbase-thrift;>hbase-thrift +3.0.0-SNAPSHOT +jar +https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 2.0 + org.apache.hbase.thirdparty http://hbase.apache.org/hbase-shaded-miscellaneous;>hbase-shaded-miscellaneous 1.0.0 jar https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 2.0 - + org.apache.hbase.thirdparty http://hbase.apache.org/hbase-shaded-netty;>hbase-shaded-netty 1.0.0 jar https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 2.0 - + org.apache.hbase.thirdparty http://hbase.apache.org/hbase-shaded-protobuf;>hbase-shaded-protobuf 1.0.0 jar https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 2.0 - + org.apache.htrace http://incubator.apache.org/projects/htrace.html;>htrace-core 3.2.0-incubating jar http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software License, Version 2.0 - + org.apache.httpcomponents http://hc.apache.org/httpcomponents-client;>httpclient 4.5.3 jar http://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 2.0 - + org.apache.httpcomponents http://hc.apache.org/httpcomponents-core-ga;>httpcore 4.4.6 jar http://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 2.0 - + org.apache.kerby http://directory.apache.org/kerby/kerby-kerb/kerb-client;>kerb-client 1.0.0-RC2 jar http://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 2.0 - + org.apache.kerby http://directory.apache.org/kerby/kerby-kerb/kerb-simplekdc;>kerb-simplekdc 1.0.0-RC2 jar http://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 2.0 - + org.apache.thrift http://thrift.apache.org;>libthrift 0.9.3 jar http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software License, Version 2.0 - + org.apache.zookeeper zookeeper 3.4.9 jar http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software License, Version 2.0 - + org.codehaus.jackson 
http://jackson.codehaus.org;>jackson-core-asl 1.9.13 jar http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software License, Version 2.0 - + org.codehaus.jackson http://jackson.codehaus.org;>jackson-jaxrs 1.9.13 jar http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software License, Version 2.0, http://www.fsf.org/licensing/licenses/lgpl.txt;>GNU Lesser General Public License (LGPL), Version 2.1 - + org.codehaus.jackson http://jackson.codehaus.org;>jackson-mapper-asl 1.9.13 jar http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software License, Version 2.0 - +
[32/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/index-all.html -- diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html index e707c34..1739525 100644 --- a/devapidocs/index-all.html +++ b/devapidocs/index-all.html @@ -1134,7 +1134,7 @@ add(Cell) - Method in class org.apache.hadoop.hbase.security.visibility.VisibilityScanDeleteTracker -add(HTableDescriptor) - Method in interface org.apache.hadoop.hbase.TableDescriptors +add(TableDescriptor) - Method in interface org.apache.hadoop.hbase.TableDescriptors Add or update descriptor @@ -1176,7 +1176,7 @@ Deprecated. -add(HTableDescriptor) - Method in class org.apache.hadoop.hbase.util.FSTableDescriptors +add(TableDescriptor) - Method in class org.apache.hadoop.hbase.util.FSTableDescriptors Adds (or updates) the table descriptor to the FileSystem and updates the local cache with it. @@ -1408,11 +1408,11 @@ Parses a combined family and qualifier and adds either both or just the family in case there is no qualifier. 
-addColumn(TableName, HColumnDescriptor, long, long) - Method in class org.apache.hadoop.hbase.master.HMaster +addColumn(TableName, ColumnFamilyDescriptor, long, long) - Method in class org.apache.hadoop.hbase.master.HMaster addColumn(RpcController, MasterProtos.AddColumnRequest) - Method in class org.apache.hadoop.hbase.master.MasterRpcServices -addColumn(TableName, HColumnDescriptor, long, long) - Method in interface org.apache.hadoop.hbase.master.MasterServices +addColumn(TableName, ColumnFamilyDescriptor, long, long) - Method in interface org.apache.hadoop.hbase.master.MasterServices Add a new column to an existing table @@ -1460,9 +1460,9 @@ AddColumnFamilyProcedure() - Constructor for class org.apache.hadoop.hbase.master.procedure.AddColumnFamilyProcedure -AddColumnFamilyProcedure(MasterProcedureEnv, TableName, HColumnDescriptor) - Constructor for class org.apache.hadoop.hbase.master.procedure.AddColumnFamilyProcedure +AddColumnFamilyProcedure(MasterProcedureEnv, TableName, ColumnFamilyDescriptor) - Constructor for class org.apache.hadoop.hbase.master.procedure.AddColumnFamilyProcedure -AddColumnFamilyProcedure(MasterProcedureEnv, TableName, HColumnDescriptor, ProcedurePrepareLatch) - Constructor for class org.apache.hadoop.hbase.master.procedure.AddColumnFamilyProcedure +AddColumnFamilyProcedure(MasterProcedureEnv, TableName, ColumnFamilyDescriptor, ProcedurePrepareLatch) - Constructor for class org.apache.hadoop.hbase.master.procedure.AddColumnFamilyProcedure AddColumnFamilyProcedureBiConsumer(AsyncAdmin, TableName) - Constructor for class org.apache.hadoop.hbase.client.RawAsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer @@ -2126,7 +2126,7 @@ Add regions to hbase:meta table. 
-addRegionsToMeta(MasterProcedureEnv, HTableDescriptor, ListHRegionInfo) - Static method in class org.apache.hadoop.hbase.master.procedure.CreateTableProcedure +addRegionsToMeta(MasterProcedureEnv, TableDescriptor, ListHRegionInfo) - Static method in class org.apache.hadoop.hbase.master.procedure.CreateTableProcedure Add the specified set of regions to the hbase:meta table. @@ -2174,7 +2174,7 @@ addReplicaActionsAgain(Action, MapServerName, MultiAction) - Method in class org.apache.hadoop.hbase.client.AsyncRequestFutureImpl.ReplicaCallIssuingRunnable -addReplicas(MasterProcedureEnv, HTableDescriptor, ListHRegionInfo) - Static method in class org.apache.hadoop.hbase.master.procedure.CreateTableProcedure +addReplicas(MasterProcedureEnv, TableDescriptor, ListHRegionInfo) - Static method in class org.apache.hadoop.hbase.master.procedure.CreateTableProcedure Create any replicas for the regions (the default replicas that was already created is passed to the method) @@ -2386,7 +2386,7 @@ addTable(TableName) - Method in class org.apache.hadoop.hbase.rsgroup.RSGroupInfo -addTableDescriptor(HTableDescriptor) - Method in class org.apache.hadoop.hbase.snapshot.SnapshotManifest +addTableDescriptor(TableDescriptor) - Method in class org.apache.hadoop.hbase.snapshot.SnapshotManifest Add the table descriptor to the snapshot manifest @@ -2398,7 +2398,7 @@ addTables(TableName[]) - Method in class org.apache.hadoop.hbase.backup.BackupInfo -addTableToMeta(MasterProcedureEnv, HTableDescriptor, ListHRegionInfo) - Static method in class org.apache.hadoop.hbase.master.procedure.CreateTableProcedure +addTableToMeta(MasterProcedureEnv, TableDescriptor, ListHRegionInfo) - Static method in class org.apache.hadoop.hbase.master.procedure.CreateTableProcedure addTagPart(Cell) - Method in class org.apache.hadoop.hbase.codec.prefixtree.encode.PrefixTreeEncoder @@ -3846,6 +3846,26 @@ AsyncBatchRpcRetryingCaller.ServerRequest - Class in org.apache.hadoop.hbase.client +AsyncBufferedMutator - 
Interface in org.apache.hadoop.hbase.client + +Used to communicate with a
[31/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/HColumnDescriptor.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/HColumnDescriptor.html b/devapidocs/org/apache/hadoop/hbase/HColumnDescriptor.html index 444c7b2..0ce31fd 100644 --- a/devapidocs/org/apache/hadoop/hbase/HColumnDescriptor.html +++ b/devapidocs/org/apache/hadoop/hbase/HColumnDescriptor.html @@ -2979,7 +2979,7 @@ public hashCode -publicinthashCode() +publicinthashCode() Deprecated. Overrides: @@ -2995,7 +2995,7 @@ public compareTo -publicintcompareTo(HColumnDescriptorother) +publicintcompareTo(HColumnDescriptorother) Deprecated. Specified by: @@ -3009,7 +3009,7 @@ public toByteArray -publicbyte[]toByteArray() +publicbyte[]toByteArray() Deprecated. Returns: @@ -3025,7 +3025,7 @@ public parseFrom -public staticHColumnDescriptorparseFrom(byte[]bytes) +public staticHColumnDescriptorparseFrom(byte[]bytes) throws DeserializationException Deprecated. @@ -3046,7 +3046,7 @@ public getConfigurationValue -publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringgetConfigurationValue(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringkey) +publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringgetConfigurationValue(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringkey) Deprecated. 
Specified by: @@ -3064,7 +3064,7 @@ public getConfiguration -publichttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringgetConfiguration() +publichttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringgetConfiguration() Deprecated. Specified by: @@ -3080,7 +3080,7 @@ public setConfiguration -publicHColumnDescriptorsetConfiguration(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringkey, +publicHColumnDescriptorsetConfiguration(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringkey, http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringvalue) Deprecated. Setter for storing a configuration setting. @@ -3097,7 +3097,7 @@ public removeConfiguration -publicHColumnDescriptorremoveConfiguration(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringkey) +publicHColumnDescriptorremoveConfiguration(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringkey) Deprecated. Remove a configuration setting represented by the key. 
@@ -3108,7 +3108,7 @@ public getEncryptionType -publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringgetEncryptionType() +publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringgetEncryptionType() Deprecated. Specified by: @@ -3124,7 +3124,7 @@ public setEncryptionType -publicHColumnDescriptorsetEncryptionType(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringvalue) +publicHColumnDescriptorsetEncryptionType(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringvalue) Deprecated. Set the encryption algorithm for use with this family @@ -3139,7 +3139,7 @@ public getEncryptionKey -publicbyte[]getEncryptionKey() +publicbyte[]getEncryptionKey() Deprecated. Specified by: @@ -3155,7 +3155,7 @@ public setEncryptionKey -publicHColumnDescriptorsetEncryptionKey(byte[]value) +publicHColumnDescriptorsetEncryptionKey(byte[]value) Deprecated. Set the raw crypto key attribute for the family
[23/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html index b5b8b17..ea5ecac 100644 --- a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html +++ b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html @@ -1612,6 +1612,18 @@ service. (package private) class +AsyncBufferedMutatorBuilderImpl +The implementation of AsyncBufferedMutatorBuilder. + + + +(package private) class +AsyncBufferedMutatorImpl +The implementation of AsyncBufferedMutator. + + + +(package private) class AsyncClientScanner The asynchronous client scanner implementation. http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Public.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Public.html b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Public.html index 3065896..7d4232e 100644 --- a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Public.html +++ b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Public.html @@ -660,6 +660,18 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. interface +AsyncBufferedMutator +Used to communicate with a single HBase table in batches. + + + +interface +AsyncBufferedMutatorBuilder +For creating AsyncBufferedMutator. + + + +interface AsyncConnection The asynchronous version of Connection. 
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html b/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html index e9aa763..f9ebb7f 100644 --- a/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html +++ b/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html @@ -88,12 +88,12 @@ Annotation Type Hierarchy -org.apache.hadoop.hbase.classification.InterfaceAudience.LimitedPrivate (implements java.lang.annotation.http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true; title="class or interface in java.lang.annotation">Annotation) -org.apache.hadoop.hbase.classification.InterfaceStability.Evolving (implements java.lang.annotation.http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true; title="class or interface in java.lang.annotation">Annotation) -org.apache.hadoop.hbase.classification.InterfaceAudience.Public (implements java.lang.annotation.http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true; title="class or interface in java.lang.annotation">Annotation) org.apache.hadoop.hbase.classification.InterfaceStability.Stable (implements java.lang.annotation.http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true; title="class or interface in java.lang.annotation">Annotation) -org.apache.hadoop.hbase.classification.InterfaceAudience.Private (implements java.lang.annotation.http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true; title="class or interface in java.lang.annotation">Annotation) org.apache.hadoop.hbase.classification.InterfaceStability.Unstable (implements 
java.lang.annotation.http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true; title="class or interface in java.lang.annotation">Annotation) +org.apache.hadoop.hbase.classification.InterfaceAudience.Private (implements java.lang.annotation.http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true; title="class or interface in java.lang.annotation">Annotation) +org.apache.hadoop.hbase.classification.InterfaceAudience.Public (implements java.lang.annotation.http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true; title="class or interface in java.lang.annotation">Annotation) +org.apache.hadoop.hbase.classification.InterfaceStability.Evolving (implements java.lang.annotation.http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true; title="class or interface in java.lang.annotation">Annotation)
[36/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/checkstyle.rss -- diff --git a/checkstyle.rss b/checkstyle.rss index c071262..0122a8b 100644 --- a/checkstyle.rss +++ b/checkstyle.rss @@ -25,8 +25,8 @@ under the License. en-us 2007 - 2017 The Apache Software Foundation - File: 2026, - Errors: 12821, + File: 2030, + Errors: 12845, Warnings: 0, Infos: 0 @@ -69,7 +69,7 @@ under the License. 0 - 2 + 1 @@ -704,6 +704,20 @@ under the License. + http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.client.AsyncBufferedMutatorBuilderImpl.java;>org/apache/hadoop/hbase/client/AsyncBufferedMutatorBuilderImpl.java + + + 0 + + + 0 + + + 0 + + + + http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.util.ObjectPool.java;>org/apache/hadoop/hbase/util/ObjectPool.java @@ -2253,7 +2267,7 @@ under the License. 0 - 12 + 15 @@ -5011,7 +5025,7 @@ under the License. 0 - 30 + 28 @@ -5417,7 +5431,7 @@ under the License. 0 - 2 + 3 @@ -6822,6 +6836,20 @@ under the License. + http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.client.AsyncBufferedMutatorImpl.java;>org/apache/hadoop/hbase/client/AsyncBufferedMutatorImpl.java + + + 0 + + + 0 + + + 0 + + + + http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceFactory.java;>org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceFactory.java @@ -6976,7 +7004,7 @@ under the License. - http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.backup.impl.TableBackupClient.java;>org/apache/hadoop/hbase/backup/impl/TableBackupClient.java + http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.master.procedure.RSProcedureDispatcher.java;>org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java 0 @@ -6985,12 +7013,12 @@ under the License. 
0 - 13 + 6 - http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.master.procedure.RSProcedureDispatcher.java;>org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java + http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.backup.impl.TableBackupClient.java;>org/apache/hadoop/hbase/backup/impl/TableBackupClient.java 0 @@ -6999,7 +7027,7 @@ under the License. 0 - 6 + 13 @@ -7055,7 +7083,7 @@ under the License. 0 - 15 + 24 @@ -7158,7 +7186,7 @@ under the License. - http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.backup.BackupTableInfo.java;>org/apache/hadoop/hbase/backup/BackupTableInfo.java + http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.thrift.IncrementCoalescer.java;>org/apache/hadoop/hbase/thrift/IncrementCoalescer.java 0 @@ -7167,12 +7195,12 @@ under the
[51/51] [partial] hbase-site git commit: Published site at .
Published site at . Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/4f1f2a0b Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/4f1f2a0b Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/4f1f2a0b Branch: refs/heads/asf-site Commit: 4f1f2a0b18b050b8f7edfab7489acdc9477412c8 Parents: 5bd98ad Author: jenkinsAuthored: Thu Aug 24 15:10:16 2017 + Committer: jenkins Committed: Thu Aug 24 15:10:16 2017 + -- acid-semantics.html | 4 +- apache_hbase_reference_guide.pdf| 8 +- apidocs/allclasses-frame.html | 2 + apidocs/allclasses-noframe.html | 2 + apidocs/deprecated-list.html|73 +- apidocs/index-all.html |95 +- .../apache/hadoop/hbase/HColumnDescriptor.html |48 +- .../apache/hadoop/hbase/HTableDescriptor.html |84 +- apidocs/org/apache/hadoop/hbase/ServerName.html |12 +- .../hadoop/hbase/class-use/ServerName.html | 4 +- .../hadoop/hbase/class-use/TableName.html | 210 +- .../hadoop/hbase/client/AsyncAdminBuilder.html | 4 +- .../hbase/client/AsyncBufferedMutator.html | 359 + .../client/AsyncBufferedMutatorBuilder.html | 365 + .../hadoop/hbase/client/AsyncConnection.html| 115 +- .../hbase/client/TableDescriptorBuilder.html| 154 +- .../hbase/client/TableSnapshotScanner.html |12 +- .../client/class-use/AsyncBufferedMutator.html | 182 + .../class-use/AsyncBufferedMutatorBuilder.html | 223 + .../hadoop/hbase/client/class-use/Mutation.html |12 + .../hbase/client/class-use/TableDescriptor.html |34 +- .../hadoop/hbase/client/package-frame.html | 2 + .../hadoop/hbase/client/package-summary.html|12 + .../hadoop/hbase/client/package-tree.html | 2 + .../apache/hadoop/hbase/client/package-use.html |10 + .../client/replication/ReplicationAdmin.html|90 +- .../class-use/ReplicationException.html | 8 +- apidocs/overview-tree.html | 3 + .../apache/hadoop/hbase/HColumnDescriptor.html | 363 +- .../apache/hadoop/hbase/HTableDescriptor.html | 769 +- .../org/apache/hadoop/hbase/ServerName.html | 4 +- 
.../hbase/client/AsyncBufferedMutator.html | 156 + .../client/AsyncBufferedMutatorBuilder.html | 157 + .../hadoop/hbase/client/AsyncConnection.html|41 +- .../client/ColumnFamilyDescriptorBuilder.html | 421 +- .../hbase/client/TableDescriptorBuilder.html| 2724 +-- .../hbase/client/TableSnapshotScanner.html | 319 +- .../client/replication/ReplicationAdmin.html| 1008 +- book.html | 4 +- bulk-loads.html | 4 +- checkstyle-aggregate.html | 16368 + checkstyle.rss | 188 +- coc.html| 4 +- cygwin.html | 4 +- dependencies.html | 4 +- dependency-convergence.html |44 +- dependency-info.html| 4 +- dependency-management.html |98 +- devapidocs/allclasses-frame.html| 4 + devapidocs/allclasses-noframe.html | 4 + devapidocs/constant-values.html | 6 +- devapidocs/deprecated-list.html | 227 +- devapidocs/index-all.html | 460 +- .../apache/hadoop/hbase/HColumnDescriptor.html |48 +- .../apache/hadoop/hbase/HTableDescriptor.html |84 +- .../org/apache/hadoop/hbase/ServerName.html |12 +- .../apache/hadoop/hbase/TableDescriptors.html |46 +- .../hadoop/hbase/backup/package-tree.html | 4 +- .../hadoop/hbase/backup/util/RestoreTool.html |78 +- .../org/apache/hadoop/hbase/class-use/Cell.html | 225 +- .../hbase/class-use/HColumnDescriptor.html | 349 - .../hadoop/hbase/class-use/HRegionInfo.html | 120 +- .../hbase/class-use/HTableDescriptor.html | 1208 +- .../apache/hadoop/hbase/class-use/Server.html |16 +- .../hadoop/hbase/class-use/ServerName.html | 4 +- .../hbase/class-use/TableDescriptors.html | 6 +- .../hadoop/hbase/class-use/TableName.html | 357 +- .../class-use/InterfaceAudience.Private.html|12 + .../class-use/InterfaceAudience.Public.html |12 + .../hbase/classification/package-tree.html | 8 +- ...yncBatchRpcRetryingCaller.ServerRequest.html | 4 +- .../hbase/client/AsyncBufferedMutator.html | 363 + .../client/AsyncBufferedMutatorBuilder.html | 369
[38/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/book.html -- diff --git a/book.html b/book.html index c54af55..ffd1ebc 100644 --- a/book.html +++ b/book.html @@ -4804,7 +4804,7 @@ Some configurations would only appear in source code; the only way to identify t Default -10 +16 @@ -35069,7 +35069,7 @@ The server will return cellblocks compressed using this same compressor as long Version 3.0.0-SNAPSHOT -Last updated 2017-08-23 14:29:41 UTC +Last updated 2017-08-24 14:29:38 UTC http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/bulk-loads.html -- diff --git a/bulk-loads.html b/bulk-loads.html index e2a45d8..fc6727b 100644 --- a/bulk-loads.html +++ b/bulk-loads.html @@ -7,7 +7,7 @@ - + Apache HBase Bulk Loads in Apache HBase (TM) @@ -311,7 +311,7 @@ under the License. --> https://www.apache.org/;>The Apache Software Foundation. All rights reserved. - Last Published: 2017-08-23 + Last Published: 2017-08-24
[24/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html index 592bd87..9754da6 100644 --- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html +++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html @@ -538,7 +538,7 @@ service. -HTableDescriptor +TableDescriptor TableDescriptors.get(TableNametableName) @@ -643,7 +643,7 @@ service. TableName.isMetaTableName(TableNametn) -HTableDescriptor +TableDescriptor TableDescriptors.remove(TableNametablename) @@ -1827,12 +1827,12 @@ service. private void -RestoreTool.checkAndCreateTable(Connectionconn, +RestoreTool.checkAndCreateTable(Connectionconn, org.apache.hadoop.fs.PathtableBackupPath, TableNametableName, TableNametargetTableName, http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListorg.apache.hadoop.fs.PathregionDirList, - HTableDescriptorhtd, + TableDescriptorhtd, booleantruncateIfExists) Prepare the table for bulkload, most codes copied from LoadIncrementalHFiles.createTable(TableName, String, Admin) @@ -1905,13 +1905,13 @@ service. -(package private) HTableDescriptor +(package private) TableDescriptor RestoreTool.getTableDesc(TableNametableName) Get table descriptor -private HTableDescriptor +private TableDescriptor RestoreTool.getTableDescriptor(org.apache.hadoop.fs.FileSystemfileSys, TableNametableName, http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringlastIncrBackupId) @@ -2191,18 +2191,29 @@ service. 
TableName -HTable.getName() +AsyncBufferedMutatorImpl.getName() TableName -BufferedMutatorImpl.getName() +HTable.getName() TableName +BufferedMutatorImpl.getName() + + +TableName AsyncTableBase.getName() Gets the fully qualified table name instance of this table. + +TableName +AsyncBufferedMutator.getName() +Gets the fully qualified table name instance of the table that this + AsyncBufferedMutator writes to. + + TableName HTableWrapper.getName() @@ -2713,6 +2724,11 @@ service. +static TableDescriptor +TableDescriptorBuilder.copy(TableNamename, +TableDescriptordesc) + + private MultiServerCallable AsyncRequestFutureImpl.createCallable(ServerNameserver, TableNametableName, @@ -2720,14 +2736,14 @@ service. Create a callable. - + static Table HTableWrapper.createWrapper(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListTableopenTables, TableNametableName, CoprocessorHost.Environmentenv, http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true; title="class or interface in java.util.concurrent">ExecutorServicepool) - + void Admin.deleteColumn(TableNametableName, byte[]columnFamily) @@ -2739,7 +2755,7 @@ service. - + void HBaseAdmin.deleteColumn(TableNametableName, byte[]columnFamily) @@ -2749,126 +2765,126 @@ service. - + http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true; title="class or interface in java.lang">Void AsyncHBaseAdmin.deleteColumnFamily(TableNametableName, byte[]columnFamily) - + void Admin.deleteColumnFamily(TableNametableName, byte[]columnFamily) Delete a column family from a table. 
- + void HBaseAdmin.deleteColumnFamily(TableNametableName, byte[]columnFamily) - + http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true; title="class or interface in java.lang">Void AsyncAdmin.deleteColumnFamily(TableNametableName, byte[]columnFamily) Delete a column family from a table. - + http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true; title="class or interface in java.lang">Void RawAsyncHBaseAdmin.deleteColumnFamily(TableNametableName,
[29/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html index 55bad33..43b5be9 100644 --- a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html +++ b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html @@ -4947,35 +4947,26 @@ service. -private Cell -HRegion.reckonAppend(Celldelta, -CellcurrentValue, -longnow, -Appendmutation) +private static Cell +HRegion.reckonDelta(Celldelta, + CellcurrentCell, + byte[]columnFamily, + longnow, + Mutationmutation, + http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true; title="class or interface in java.util.function">FunctionCell,byte[]supplier) -private Cell -HRegion.reckonIncrement(Celldelta, - longdeltaAmount, - CellcurrentValue, - byte[]columnFamily, - longnow, - Mutationmutation) -Calculate new Increment Cell. - - - Cell CellFlatMap.remove(http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Objecto) - + Cell HMobStore.resolve(Cellreference, booleancacheBlocks) Reads the cell from the mob file, and the read point does not count. - + Cell HMobStore.resolve(Cellreference, booleancacheBlocks, @@ -4984,7 +4975,7 @@ service. Reads the cell from the mob file. - + Cell CellFlatMap.CellFlatMapEntry.setValue(Cellvalue) @@ -5824,24 +5815,15 @@ service. 
-private Cell -HRegion.reckonAppend(Celldelta, -CellcurrentValue, -longnow, -Appendmutation) +private static Cell +HRegion.reckonDelta(Celldelta, + CellcurrentCell, + byte[]columnFamily, + longnow, + Mutationmutation, + http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true; title="class or interface in java.util.function">FunctionCell,byte[]supplier) -private Cell -HRegion.reckonIncrement(Celldelta, - longdeltaAmount, - CellcurrentValue, - byte[]columnFamily, - longnow, - Mutationmutation) -Calculate new Increment Cell. - - - boolean StoreFileScanner.requestSeek(Cellkv, booleanforward, @@ -5849,7 +5831,7 @@ service. Pretend we have done a seek but don't do it yet, if possible. - + boolean KeyValueScanner.requestSeek(Cellkv, booleanforward, @@ -5859,13 +5841,13 @@ service. row/column combination specified by the kv parameter. - + boolean SegmentScanner.requestSeek(Cellc, booleanforward, booleanuseBloom) - + boolean KeyValueHeap.requestSeek(Cellkey, booleanforward, @@ -5875,70 +5857,70 @@ service. row/column combination specified by the kv parameter. - + boolean NonLazyKeyValueScanner.requestSeek(Cellkv, booleanforward, booleanuseBloom) - + boolean ReversedKeyValueHeap.requestSeek(Cellkey, booleanforward, booleanuseBloom) - + boolean StoreFileScanner.reseek(Cellkey) - + boolean StoreScanner.reseek(Cellkv) - + boolean KeyValueScanner.reseek(Cellkey) Reseek the scanner at or after the specified KeyValue. - + boolean SegmentScanner.reseek(Cellcell) Reseek the scanner at or after the specified KeyValue. - + boolean ReversedStoreScanner.reseek(Cellkv) - + boolean KeyValueHeap.reseek(CellseekKey) This function is identical to the KeyValueHeap.seek(Cell) function except that scanner.seek(seekKey) is changed to scanner.reseek(seekKey). 
- + boolean ReversedKeyValueHeap.reseek(CellseekKey) - + (package private) static boolean StoreFileScanner.reseekAtOrAfter(HFileScanners, Cellk) - + private void StoreScanner.resetQueryMatcher(CelllastTopKey) - + Cell HMobStore.resolve(Cellreference, booleancacheBlocks) Reads the cell from the mob file, and the read point does not count. - + Cell HMobStore.resolve(Cellreference, booleancacheBlocks, @@ -5947,86 +5929,86 @@ service. Reads the cell from the mob file. - + protected void HRegion.restoreEdit(HStores, Cellcell, MemstoreSizememstoreSize) - + protected void StripeMultiFileWriter.sanityCheckLeft(byte[]left, Cellcell) Subclasses can call this method to make sure the first KV is within multi-writer range. - + protected void StripeMultiFileWriter.sanityCheckRight(byte[]right, Cellcell) Subclasses can call this method to make sure the last KV is within multi-writer range. - + boolean StoreFileScanner.seek(Cellkey) - +
[22/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/client/AsyncBufferedMutatorImpl.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncBufferedMutatorImpl.html b/devapidocs/org/apache/hadoop/hbase/client/AsyncBufferedMutatorImpl.html new file mode 100644 index 000..560d41c --- /dev/null +++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncBufferedMutatorImpl.html @@ -0,0 +1,549 @@ +http://www.w3.org/TR/html4/loose.dtd;> + + + + + +AsyncBufferedMutatorImpl (Apache HBase 3.0.0-SNAPSHOT API) + + + + + +var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10}; +var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]}; +var altColor = "altColor"; +var rowColor = "rowColor"; +var tableTab = "tableTab"; +var activeTableTab = "activeTableTab"; + + +JavaScript is disabled on your browser. + + + + + +Skip navigation links + + + + +Overview +Package +Class +Use +Tree +Deprecated +Index +Help + + + + +PrevClass +NextClass + + +Frames +NoFrames + + +AllClasses + + + + + + + +Summary: +Nested| +Field| +Constr| +Method + + +Detail: +Field| +Constr| +Method + + + + + + + + +org.apache.hadoop.hbase.client +Class AsyncBufferedMutatorImpl + + + +http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">java.lang.Object + + +org.apache.hadoop.hbase.client.AsyncBufferedMutatorImpl + + + + + + + +All Implemented Interfaces: +http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true; title="class or interface in java.io">Closeable, http://docs.oracle.com/javase/8/docs/api/java/lang/AutoCloseable.html?is-external=true; title="class or interface in java.lang">AutoCloseable, AsyncBufferedMutator + + + +@InterfaceAudience.Private +class AsyncBufferedMutatorImpl +extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in 
java.lang">Object +implements AsyncBufferedMutator +The implementation of AsyncBufferedMutator. Simply wrap an AsyncTableBase. + + + + + + + + + + + +Field Summary + +Fields + +Modifier and Type +Field and Description + + +private long +bufferedSize + + +private boolean +closed + + +private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true; title="class or interface in java.lang">Void +futures + + +private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListMutation +mutations + + +private AsyncTableBase +table + + +private long +writeBufferSize + + + + + + + + + +Constructor Summary + +Constructors + +Constructor and Description + + +AsyncBufferedMutatorImpl(AsyncTableBasetable, +longwriteBufferSize) + + + + + + + + + +Method Summary + +All MethodsInstance MethodsConcrete Methods + +Modifier and Type +Method and Description + + +void +close() +Performs a AsyncBufferedMutator.flush() and releases any resources held. + + + +void +flush() +Executes all the buffered, asynchronous operations. + + + +org.apache.hadoop.conf.Configuration +getConfiguration() +Returns the Configuration object used by this instance. + + + +TableName +getName() +Gets the fully qualified table name instance of the table that this + AsyncBufferedMutator writes to. + + + +long +getWriteBufferSize() +Returns the maximum size in bytes of the write buffer. 
+ + + +private void +internalFlush() + + +http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true; title="class or interface in java.lang">Void +mutate(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">List? extends Mutationmutations) +Send some Mutations to the table. + + +
[27/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html index 6155ba0..2360a5f 100644 --- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html +++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html @@ -1233,10 +1233,10 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. -ClientSideRegionScanner(org.apache.hadoop.conf.Configurationconf, +ClientSideRegionScanner(org.apache.hadoop.conf.Configurationconf, org.apache.hadoop.fs.FileSystemfs, org.apache.hadoop.fs.PathrootDir, - HTableDescriptorhtd, + TableDescriptorhtd, HRegionInfohri, Scanscan, ScanMetricsscanMetrics) @@ -2210,7 +2210,7 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. -InputSplit(HTableDescriptorhtd, +InputSplit(TableDescriptorhtd, HRegionInforegionInfo, http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringlocations, Scanscan, @@ -4319,8 +4319,8 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. 
private static http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListHRegionInfo -CreateTableProcedure.addReplicas(MasterProcedureEnvenv, - HTableDescriptorhTableDescriptor, +CreateTableProcedure.addReplicas(MasterProcedureEnvenv, + TableDescriptortableDescriptor, http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListHRegionInforegions) Create any replicas for the regions (the default replicas that was already created is passed to the method) @@ -4328,35 +4328,35 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. protected static http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListHRegionInfo -CreateTableProcedure.addTableToMeta(MasterProcedureEnvenv, - HTableDescriptorhTableDescriptor, +CreateTableProcedure.addTableToMeta(MasterProcedureEnvenv, + TableDescriptortableDescriptor, http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListHRegionInforegions) private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListHRegionInfo -CloneSnapshotProcedure.createFilesystemLayout(MasterProcedureEnvenv, - HTableDescriptorhTableDescriptor, +CloneSnapshotProcedure.createFilesystemLayout(MasterProcedureEnvenv, + TableDescriptortableDescriptor, http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListHRegionInfonewRegions) Create regions in file system. 
protected static http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListHRegionInfo -CreateTableProcedure.createFsLayout(MasterProcedureEnvenv, - HTableDescriptorhTableDescriptor, +CreateTableProcedure.createFsLayout(MasterProcedureEnvenv, + TableDescriptortableDescriptor, http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListHRegionInfonewRegions) protected static http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListHRegionInfo -CreateTableProcedure.createFsLayout(MasterProcedureEnvenv, - HTableDescriptorhTableDescriptor, +CreateTableProcedure.createFsLayout(MasterProcedureEnvenv, + TableDescriptortableDescriptor, http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListHRegionInfonewRegions, CreateTableProcedure.CreateHdfsRegionshdfsRegionHandler) private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListHRegionInfo -CloneSnapshotProcedure.createFsLayout(MasterProcedureEnvenv, - HTableDescriptorhTableDescriptor, +CloneSnapshotProcedure.createFsLayout(MasterProcedureEnvenv, +
[11/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.html b/devapidocs/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.html index e4d9525..7848c62 100644 --- a/devapidocs/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.html +++ b/devapidocs/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.html @@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Private -public class FavoredNodeAssignmentHelper +public class FavoredNodeAssignmentHelper extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object Helper class for FavoredNodeLoadBalancer that has all the intelligence for racks, meta scans, etc. Instantiated by the FavoredNodeLoadBalancer when needed (from @@ -393,7 +393,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? LOG -private static finalorg.apache.commons.logging.Log LOG +private static finalorg.apache.commons.logging.Log LOG @@ -402,7 +402,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? rackManager -privateRackManager rackManager +privateRackManager rackManager @@ -411,7 +411,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? 
rackToRegionServerMap -privatehttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListServerName rackToRegionServerMap +privatehttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListServerName rackToRegionServerMap @@ -420,7 +420,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? uniqueRackList -privatehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String uniqueRackList +privatehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String uniqueRackList @@ -429,7 +429,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? 
regionServerToRackMap -privatehttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String regionServerToRackMap +privatehttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String regionServerToRackMap @@ -438,7 +438,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? random -privatehttp://docs.oracle.com/javase/8/docs/api/java/util/Random.html?is-external=true; title="class or interface in java.util">Random random +privatehttp://docs.oracle.com/javase/8/docs/api/java/util/Random.html?is-external=true; title="class or interface in java.util">Random random @@ -447,7 +447,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? servers -privatehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListServerName servers +privatehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListServerName servers @@ -456,7 +456,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? FAVOREDNODES_QUALIFIER -public static finalbyte[]
[25/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/class-use/Server.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Server.html b/devapidocs/org/apache/hadoop/hbase/class-use/Server.html index 7c23e03..e3a1095 100644 --- a/devapidocs/org/apache/hadoop/hbase/class-use/Server.html +++ b/devapidocs/org/apache/hadoop/hbase/class-use/Server.html @@ -541,31 +541,31 @@ -OpenMetaHandler(Serverserver, +OpenMetaHandler(Serverserver, RegionServerServicesrsServices, HRegionInforegionInfo, - HTableDescriptorhtd, + TableDescriptorhtd, longmasterSystemTime) -OpenPriorityRegionHandler(Serverserver, +OpenPriorityRegionHandler(Serverserver, RegionServerServicesrsServices, HRegionInforegionInfo, - HTableDescriptorhtd, + TableDescriptorhtd, longmasterSystemTime) -OpenRegionHandler(Serverserver, +OpenRegionHandler(Serverserver, RegionServerServicesrsServices, HRegionInforegionInfo, - HTableDescriptorhtd, + TableDescriptorhtd, longmasterSystemTime) -OpenRegionHandler(Serverserver, +OpenRegionHandler(Serverserver, RegionServerServicesrsServices, HRegionInforegionInfo, - HTableDescriptorhtd, + TableDescriptorhtd, longmasterSystemTime, EventTypeeventType) http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html index bb9845a..e5dee47 100644 --- a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html +++ b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html @@ -451,8 +451,8 @@ static boolean -ServerName.isSameHostnameAndPort(ServerNameleft, - ServerNameright) +ServerName.isSameAddress(ServerNameleft, + ServerNameright) boolean http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html -- diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html b/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html index 89f3db5..9a48004 100644 --- a/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html +++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html @@ -121,14 +121,10 @@ -protected TableDescriptors -HMaster.getFsTableDescriptors() - - TableDescriptors HMaster.getTableDescriptors() - + TableDescriptors MasterServices.getTableDescriptors()
[42/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncBufferedMutatorBuilder.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncBufferedMutatorBuilder.html b/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncBufferedMutatorBuilder.html new file mode 100644 index 000..8f947f8 --- /dev/null +++ b/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncBufferedMutatorBuilder.html @@ -0,0 +1,157 @@ +http://www.w3.org/TR/html4/loose.dtd;> + + +Source code + + + + +001/** +002 * Licensed to the Apache Software Foundation (ASF) under one +003 * or more contributor license agreements. See the NOTICE file +004 * distributed with this work for additional information +005 * regarding copyright ownership. The ASF licenses this file +006 * to you under the Apache License, Version 2.0 (the +007 * "License"); you may not use this file except in compliance +008 * with the License. You may obtain a copy of the License at +009 * +010 * http://www.apache.org/licenses/LICENSE-2.0 +011 * +012 * Unless required by applicable law or agreed to in writing, software +013 * distributed under the License is distributed on an "AS IS" BASIS, +014 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +015 * See the License for the specific language governing permissions and +016 * limitations under the License. +017 */ +018package org.apache.hadoop.hbase.client; +019 +020import static org.apache.hadoop.hbase.client.ConnectionUtils.retries2Attempts; +021 +022import java.util.concurrent.TimeUnit; +023 +024import org.apache.hadoop.hbase.classification.InterfaceAudience; +025 +026/** +027 * For creating {@link AsyncBufferedMutator}. +028 */ +029@InterfaceAudience.Public +030public interface AsyncBufferedMutatorBuilder { +031 +032 /** +033 * Set timeout for the background flush operation. 
+034 */ +035 AsyncBufferedMutatorBuilder setOperationTimeout(long timeout, TimeUnit unit); +036 +037 /** +038 * Set timeout for each rpc request when doing background flush. +039 */ +040 AsyncBufferedMutatorBuilder setRpcTimeout(long timeout, TimeUnit unit); +041 +042 /** +043 * Set the base pause time for retrying. We use an exponential policy to generate sleep time when +044 * retrying. +045 */ +046 AsyncBufferedMutatorBuilder setRetryPause(long pause, TimeUnit unit); +047 +048 /** +049 * Set the max retry times for an operation. Usually it is the max attempt times minus 1. +050 * p +051 * Operation timeout and max attempt times(or max retry times) are both limitations for retrying, +052 * we will stop retrying when we reach any of the limitations. +053 * @see #setMaxAttempts(int) +054 * @see #setOperationTimeout(long, TimeUnit) +055 */ +056 default AsyncBufferedMutatorBuilder setMaxRetries(int maxRetries) { +057return setMaxAttempts(retries2Attempts(maxRetries)); +058 } +059 +060 /** +061 * Set the max attempt times for an operation. Usually it is the max retry times plus 1. Operation +062 * timeout and max attempt times(or max retry times) are both limitations for retrying, we will +063 * stop retrying when we reach any of the limitations. +064 * @see #setMaxRetries(int) +065 * @see #setOperationTimeout(long, TimeUnit) +066 */ +067 AsyncBufferedMutatorBuilder setMaxAttempts(int maxAttempts); +068 +069 /** +070 * Set the number of retries that are allowed before we start to log. +071 */ +072 AsyncBufferedMutatorBuilder setStartLogErrorsCnt(int startLogErrorsCnt); +073 +074 /** +075 * Override the write buffer size specified by the provided {@link AsyncConnection}'s +076 * {@link org.apache.hadoop.conf.Configuration} instance, via the configuration key +077 * {@code hbase.client.write.buffer}. +078 */ +079 AsyncBufferedMutatorBuilder setWriteBufferSize(long writeBufferSize); +080 +081 /** +082 * Create the {@link AsyncBufferedMutator} instance. 
+083 */ +084 AsyncBufferedMutator build(); +085} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncConnection.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncConnection.html b/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncConnection.html index 0691430..4c15d38 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncConnection.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncConnection.html @@ -144,7 +144,46 @@ 136 * @param pool the thread pool to use for executing callback 137 */ 138 AsyncAdminBuilder getAdminBuilder(ExecutorService pool); -139} +139 +140 /** +141 * Retrieve an
[20/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html index 2ba9d73..79fcaad 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html +++ b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html @@ -123,7 +123,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Private @InterfaceStability.Evolving -protected static class HBaseAdmin.ProcedureFutureV +protected static class HBaseAdmin.ProcedureFutureV extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object implements http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true; title="class or interface in java.util.concurrent">FutureV Future that waits on a procedure result. 
@@ -328,7 +328,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren exception -privatehttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutionException.html?is-external=true; title="class or interface in java.util.concurrent">ExecutionException exception +privatehttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutionException.html?is-external=true; title="class or interface in java.util.concurrent">ExecutionException exception @@ -337,7 +337,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren procResultFound -privateboolean procResultFound +privateboolean procResultFound @@ -346,7 +346,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren done -privateboolean done +privateboolean done @@ -355,7 +355,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren cancelled -privateboolean cancelled +privateboolean cancelled @@ -364,7 +364,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren result -privateV result +privateV result @@ -373,7 +373,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren admin -private finalHBaseAdmin admin +private finalHBaseAdmin admin @@ -382,7 +382,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren procId -private finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true; title="class or interface in java.lang">Long procId +private finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true; title="class or interface in java.lang">Long procId @@ -399,7 +399,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren ProcedureFuture -publicProcedureFuture(HBaseAdminadmin, +publicProcedureFuture(HBaseAdminadmin, http://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true; title="class or interface in java.lang">LongprocId) @@ -417,7 +417,7 @@ implements 
http://docs.oracle.com/javase/8/docs/api/java/util/concurren cancel -publicbooleancancel(booleanmayInterruptIfRunning) +publicbooleancancel(booleanmayInterruptIfRunning) Specified by: http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true#cancel-boolean-; title="class or interface in java.util.concurrent">cancelin interfacehttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true; title="class or interface in java.util.concurrent">FutureV @@ -430,7 +430,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren isCancelled -publicbooleanisCancelled() +publicbooleanisCancelled() Specified by: http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true#isCancelled--; title="class or interface in java.util.concurrent">isCancelledin interfacehttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true; title="class or interface in java.util.concurrent">FutureV @@ -443,7 +443,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren abortProcedureResult -protectedorg.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponseabortProcedureResult(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequestrequest) +protectedorg.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponseabortProcedureResult(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequestrequest) throws
[08/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/io/hfile/LruBlockCache.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/LruBlockCache.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/LruBlockCache.html index b6cc417..5e8e1b3 100644 --- a/devapidocs/org/apache/hadoop/hbase/io/hfile/LruBlockCache.html +++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/LruBlockCache.html @@ -18,7 +18,7 @@ catch(err) { } //--> -var methods = {"i0":10,"i1":9,"i2":10,"i3":10,"i4":9,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10}; +var methods = {"i0":10,"i1":9,"i2":10,"i3":10,"i4":9,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10}; var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]}; var altColor = "altColor"; var rowColor = "rowColor"; @@ -227,6 +227,18 @@ implements +private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true; title="class or interface in java.util.concurrent.atomic">AtomicLong +dataBlockElements +Current number of cached data block elements + + + +private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true; title="class or interface in java.util.concurrent.atomic">AtomicLong +dataBlockSize +Current size of data blocks + + + (package private) static float 
DEFAULT_ACCEPTABLE_FACTOR @@ -600,77 +612,89 @@ implements long +getCurrentDataSize() +Returns the occupied size of data blocks, in bytes. + + + +long getCurrentSize() Returns the occupied size of the block cache, in bytes. - + +long +getDataBlockCount() +Returns the number of data blocks currently cached in the block cache. + + + http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">MapDataBlockEncoding,http://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true; title="class or interface in java.lang">Integer getEncodingCountsForTest() - + (package private) LruBlockCache.EvictionThread getEvictionThread() - + long getFreeSize() Returns the free size of the block cache, in bytes. - + (package private) http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">MapBlockCacheKey,LruCachedBlock getMapForTests() - + long getMaxSize() Get the maximum size of this cache. - + (package private) long getOverhead() - + CacheStats getStats() Get counter statistics for this cache. - + (package private) BlockCache getVictimHandler() - + long heapSize() - + (package private) boolean isEvictionInProgress() - + http://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true; title="class or interface in java.util">IteratorCachedBlock iterator() - + void logStats() - + private long memorySize() - + private long minSize() - + private long multiSize() - + void returnBlock(BlockCacheKeycacheKey, Cacheableblock) @@ -678,43 +702,43 @@ implements + private void runEviction() Multi-threaded call to run the eviction process. - + void setMaxSize(longmaxSize) Sets the max heap size that can be used by the BlockCache. - + void setVictimCache(BlockCachehandler) - + void shutdown() Shutdown the cache. - + private long singleSize() - + long size() Returns the total size of the block cache, in bytes. 
- + http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String toString() - + private long updateSizeMetrics(LruCachedBlockcb, booleanevict) @@ -1084,23 +1108,43 @@ implements Current size of cache + + + + + +dataBlockSize +private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true; title="class or interface in java.util.concurrent.atomic">AtomicLong dataBlockSize +Current size of data blocks + + elements -private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true; title="class or interface in java.util.concurrent.atomic">AtomicLong elements +private
[16/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/client/TableSnapshotScanner.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/TableSnapshotScanner.html b/devapidocs/org/apache/hadoop/hbase/client/TableSnapshotScanner.html index daac187..bce30ae 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/TableSnapshotScanner.html +++ b/devapidocs/org/apache/hadoop/hbase/client/TableSnapshotScanner.html @@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Public -public class TableSnapshotScanner +public class TableSnapshotScanner extends AbstractClientScanner A Scanner which performs a scan over snapshot files. Using this class requires copying the snapshot to a temporary empty directory, which will copy the snapshot reference files into that @@ -184,7 +184,7 @@ extends fs -private HTableDescriptor +private TableDescriptor htd @@ -335,7 +335,7 @@ extends LOG -private static finalorg.apache.commons.logging.Log LOG +private static finalorg.apache.commons.logging.Log LOG @@ -344,7 +344,7 @@ extends conf -privateorg.apache.hadoop.conf.Configuration conf +privateorg.apache.hadoop.conf.Configuration conf @@ -353,7 +353,7 @@ extends snapshotName -privatehttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String snapshotName +privatehttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String snapshotName @@ -362,7 +362,7 @@ extends fs -privateorg.apache.hadoop.fs.FileSystem fs +privateorg.apache.hadoop.fs.FileSystem fs @@ -371,7 +371,7 @@ extends rootDir -privateorg.apache.hadoop.fs.Path rootDir +privateorg.apache.hadoop.fs.Path rootDir @@ -380,7 +380,7 @@ extends restoreDir -privateorg.apache.hadoop.fs.Path restoreDir +privateorg.apache.hadoop.fs.Path restoreDir @@ -389,7 +389,7 @@ extends scan -privateScan scan +privateScan scan @@ -398,7 +398,7 @@ 
extends regions -privatehttp://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListHRegionInfo regions +privatehttp://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListHRegionInfo regions @@ -407,7 +407,7 @@ extends htd -privateHTableDescriptor htd +privateTableDescriptor htd @@ -416,7 +416,7 @@ extends currentRegionScanner -privateClientSideRegionScanner currentRegionScanner +privateClientSideRegionScanner currentRegionScanner @@ -425,7 +425,7 @@ extends currentRegion -privateint currentRegion +privateint currentRegion @@ -442,7 +442,7 @@ extends TableSnapshotScanner -publicTableSnapshotScanner(org.apache.hadoop.conf.Configurationconf, +publicTableSnapshotScanner(org.apache.hadoop.conf.Configurationconf, org.apache.hadoop.fs.PathrestoreDir, http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringsnapshotName, Scanscan) @@ -467,7 +467,7 @@ extends TableSnapshotScanner -publicTableSnapshotScanner(org.apache.hadoop.conf.Configurationconf, +publicTableSnapshotScanner(org.apache.hadoop.conf.Configurationconf, org.apache.hadoop.fs.PathrootDir, org.apache.hadoop.fs.PathrestoreDir, http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringsnapshotName, @@ -502,7 +502,7 @@ extends init -privatevoidinit() +privatevoidinit() throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException Throws: @@ -516,7 +516,7 @@ extends next -publicResultnext() +publicResultnext() throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException Description copied from interface:ResultScanner Grab the next row's worth of values. 
The scanner will return a Result. @@ -534,7 +534,7 @@ extends close -publicvoidclose() +publicvoidclose() Description copied from interface:ResultScanner Closes the scanner and releases any resources it has allocated @@ -545,7 +545,7 @@ extends renewLease -publicbooleanrenewLease() +publicbooleanrenewLease() Description copied from interface:ResultScanner Allow the client to renew the scanner's lease on the server. http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/client/class-use/Append.html
[21/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionImpl.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionImpl.html b/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionImpl.html index 1fd7a37..b6ac6bd 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionImpl.html +++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionImpl.html @@ -18,7 +18,7 @@ catch(err) { } //--> -var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10}; +var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10}; var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]}; var altColor = "altColor"; var rowColor = "rowColor"; @@ -276,51 +276,64 @@ implements getAdminStub(ServerNameserverName) +AsyncBufferedMutatorBuilder +getBufferedMutatorBuilder(TableNametableName) +Returns an AsyncBufferedMutatorBuilder for creating AsyncBufferedMutator. + + + +AsyncBufferedMutatorBuilder +getBufferedMutatorBuilder(TableNametableName, + http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true; title="class or interface in java.util.concurrent">ExecutorServicepool) +Returns an AsyncBufferedMutatorBuilder for creating AsyncBufferedMutator. + + + org.apache.hadoop.conf.Configuration getConfiguration() Returns the Configuration object used by this instance. 
- + (package private) AsyncRegionLocator getLocator() - + (package private) http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFutureorg.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService.Interface getMasterStub() - + NonceGenerator getNonceGenerator() - + AsyncTableBuilderRawAsyncTable getRawTableBuilder(TableNametableName) Returns an AsyncTableBuilder for creating RawAsyncTable. - + AsyncTableRegionLocator getRegionLocator(TableNametableName) Retrieve a AsyncRegionLocator implementation to inspect region information on a table. - + (package private) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService.Interface getRegionServerStub(ServerNameserverName) - + private HBaseRpcController getRpcController() - + AsyncTableBuilderAsyncTable getTableBuilder(TableNametableName, http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true; title="class or interface in java.util.concurrent">ExecutorServicepool) Returns an AsyncTableBuilder for creating AsyncTable. 
- + private void makeMasterStub(http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFutureorg.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService.Interfacefuture) @@ -337,7 +350,7 @@ implements AsyncConnection -getAdmin, getAdmin, getRawTable, getTable +getAdmin, getAdmin, getBufferedMutator, getBufferedMutator, getRawTable, getTable @@ -780,7 +793,7 @@ implements - + getAdminBuilder publicAsyncAdminBuildergetAdminBuilder(http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true; title="class or interface in java.util.concurrent">ExecutorServicepool) @@ -794,6 +807,42 @@ implements + + + + +getBufferedMutatorBuilder +publicAsyncBufferedMutatorBuildergetBufferedMutatorBuilder(TableNametableName) +Description copied from interface:AsyncConnection +Returns an AsyncBufferedMutatorBuilder for creating AsyncBufferedMutator. + +Specified by: +getBufferedMutatorBuilderin interfaceAsyncConnection +Parameters: +tableName - the name of the table + + + + + + + + +getBufferedMutatorBuilder +publicAsyncBufferedMutatorBuildergetBufferedMutatorBuilder(TableNametableName, + http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true; title="class or interface in java.util.concurrent">ExecutorServicepool) +Description copied from interface:AsyncConnection +Returns an AsyncBufferedMutatorBuilder for creating AsyncBufferedMutator. + +Specified by: +getBufferedMutatorBuilderin interfaceAsyncConnection +Parameters: +tableName - the name of the table +pool - the thread pool to use for executing callback + + + http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f1f2a0b/devapidocs/org/apache/hadoop/hbase/client/ClientSideRegionScanner.html
hbase git commit: HBASE-18647 Parameter cacheBlocks does not take effect in REST API for scan
Repository: hbase Updated Branches: refs/heads/branch-2 ec7bca176 -> 08212e50f HBASE-18647 Parameter cacheBlocks does not take effect in REST API for scan Signed-off-by: tedyuProject: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/08212e50 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/08212e50 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/08212e50 Branch: refs/heads/branch-2 Commit: 08212e50fff0ed61d6038d20dfdb7ceaf477420f Parents: ec7bca1 Author: Guangxu Cheng Authored: Tue Aug 22 11:29:32 2017 +0800 Committer: tedyu Committed: Thu Aug 24 08:09:25 2017 -0700 -- .../org/apache/hadoop/hbase/rest/Constants.java | 1 + .../apache/hadoop/hbase/rest/TableResource.java | 5 ++--- .../hadoop/hbase/rest/TableScanResource.java | 19 --- 3 files changed, 11 insertions(+), 14 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/08212e50/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java -- diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java index 3326f2f..e8502e7 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java @@ -78,6 +78,7 @@ public interface Constants { String SCAN_FETCH_SIZE = "hbase.rest.scan.fetchsize"; String SCAN_FILTER = "filter"; String SCAN_REVERSED = "reversed"; + String SCAN_CACHE_BLOCKS = "cacheblocks"; String CUSTOM_FILTERS = "hbase.rest.custom.filters"; String ROW_KEYS_PARAM_NAME = "row"; http://git-wip-us.apache.org/repos/asf/hbase/blob/08212e50/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java -- diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java index 3019e40..1f56881 100644 --- 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java @@ -121,9 +121,7 @@ public class TableResource extends ResourceBase { @Path("{scanspec: .*[*]$}") public TableScanResource getScanResource( - final @Context UriInfo uriInfo, final @PathParam("scanspec") String scanSpec, - final @HeaderParam("Accept") String contentType, @DefaultValue(Integer.MAX_VALUE + "") @QueryParam(Constants.SCAN_LIMIT) int userRequestedLimit, @DefaultValue("") @QueryParam(Constants.SCAN_START_ROW) String startRow, @@ -133,7 +131,7 @@ public class TableResource extends ResourceBase { @DefaultValue("-1") @QueryParam(Constants.SCAN_BATCH_SIZE) int batchSize, @DefaultValue("0") @QueryParam(Constants.SCAN_START_TIME) long startTime, @DefaultValue(Long.MAX_VALUE + "") @QueryParam(Constants.SCAN_END_TIME) long endTime, - @DefaultValue("true") @QueryParam(Constants.SCAN_BATCH_SIZE) boolean cacheBlocks, + @DefaultValue("true") @QueryParam(Constants.SCAN_CACHE_BLOCKS) boolean cacheBlocks, @DefaultValue("false") @QueryParam(Constants.SCAN_REVERSED) boolean reversed, @DefaultValue("") @QueryParam(Constants.SCAN_FILTER) String paramFilter) { try { @@ -201,6 +199,7 @@ public class TableResource extends ResourceBase { int fetchSize = this.servlet.getConfiguration().getInt(Constants.SCAN_FETCH_SIZE, 10); tableScan.setCaching(fetchSize); tableScan.setReversed(reversed); + tableScan.setCacheBlocks(cacheBlocks); return new TableScanResource(hTable.getScanner(tableScan), userRequestedLimit); } catch (IOException exp) { servlet.getMetrics().incrementFailedScanRequests(1); http://git-wip-us.apache.org/repos/asf/hbase/blob/08212e50/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java -- diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java index 5cc2c7b..3effc01 100644 --- 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java @@ -67,6 +67,9 @@ public class TableScanResource extends ResourceBase { @GET @Produces({ Constants.MIMETYPE_XML, Constants.MIMETYPE_JSON }) public CellSetModelStream get(final @Context UriInfo uriInfo) { +if (LOG.isTraceEnabled()) { +
hbase git commit: HBASE-18647 Parameter cacheBlocks does not take effect in REST API for scan
Repository: hbase Updated Branches: refs/heads/master 6e7baa07f -> 321bc55f9 HBASE-18647 Parameter cacheBlocks does not take effect in REST API for scan Signed-off-by: tedyuProject: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/321bc55f Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/321bc55f Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/321bc55f Branch: refs/heads/master Commit: 321bc55f91507491ad65b07fae9f3543451a2aeb Parents: 6e7baa0 Author: Guangxu Cheng Authored: Tue Aug 22 11:29:32 2017 +0800 Committer: tedyu Committed: Thu Aug 24 08:08:54 2017 -0700 -- .../org/apache/hadoop/hbase/rest/Constants.java | 1 + .../apache/hadoop/hbase/rest/TableResource.java | 5 ++--- .../hadoop/hbase/rest/TableScanResource.java | 19 --- 3 files changed, 11 insertions(+), 14 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/321bc55f/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java -- diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java index 3326f2f..e8502e7 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java @@ -78,6 +78,7 @@ public interface Constants { String SCAN_FETCH_SIZE = "hbase.rest.scan.fetchsize"; String SCAN_FILTER = "filter"; String SCAN_REVERSED = "reversed"; + String SCAN_CACHE_BLOCKS = "cacheblocks"; String CUSTOM_FILTERS = "hbase.rest.custom.filters"; String ROW_KEYS_PARAM_NAME = "row"; http://git-wip-us.apache.org/repos/asf/hbase/blob/321bc55f/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java -- diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java index 3019e40..1f56881 100644 --- 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java @@ -121,9 +121,7 @@ public class TableResource extends ResourceBase { @Path("{scanspec: .*[*]$}") public TableScanResource getScanResource( - final @Context UriInfo uriInfo, final @PathParam("scanspec") String scanSpec, - final @HeaderParam("Accept") String contentType, @DefaultValue(Integer.MAX_VALUE + "") @QueryParam(Constants.SCAN_LIMIT) int userRequestedLimit, @DefaultValue("") @QueryParam(Constants.SCAN_START_ROW) String startRow, @@ -133,7 +131,7 @@ public class TableResource extends ResourceBase { @DefaultValue("-1") @QueryParam(Constants.SCAN_BATCH_SIZE) int batchSize, @DefaultValue("0") @QueryParam(Constants.SCAN_START_TIME) long startTime, @DefaultValue(Long.MAX_VALUE + "") @QueryParam(Constants.SCAN_END_TIME) long endTime, - @DefaultValue("true") @QueryParam(Constants.SCAN_BATCH_SIZE) boolean cacheBlocks, + @DefaultValue("true") @QueryParam(Constants.SCAN_CACHE_BLOCKS) boolean cacheBlocks, @DefaultValue("false") @QueryParam(Constants.SCAN_REVERSED) boolean reversed, @DefaultValue("") @QueryParam(Constants.SCAN_FILTER) String paramFilter) { try { @@ -201,6 +199,7 @@ public class TableResource extends ResourceBase { int fetchSize = this.servlet.getConfiguration().getInt(Constants.SCAN_FETCH_SIZE, 10); tableScan.setCaching(fetchSize); tableScan.setReversed(reversed); + tableScan.setCacheBlocks(cacheBlocks); return new TableScanResource(hTable.getScanner(tableScan), userRequestedLimit); } catch (IOException exp) { servlet.getMetrics().incrementFailedScanRequests(1); http://git-wip-us.apache.org/repos/asf/hbase/blob/321bc55f/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java -- diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java index 5cc2c7b..3effc01 100644 --- 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java @@ -67,6 +67,9 @@ public class TableScanResource extends ResourceBase { @GET @Produces({ Constants.MIMETYPE_XML, Constants.MIMETYPE_JSON }) public CellSetModelStream get(final @Context UriInfo uriInfo) { +if (LOG.isTraceEnabled()) { +
hbase git commit: HBASE-15982 Interface ReplicationEndpoint extends Guava's Service
Repository: hbase Updated Branches: refs/heads/branch-2 1ae9a3901 -> ec7bca176 HBASE-15982 Interface ReplicationEndpoint extends Guava's Service Breaking change to our ReplicationEndpoint and BaseReplicationEndpoint. ReplicationEndpoint implemented Guava 0.12 Service. An abstract subclass, BaseReplicationEndpoint, provided default implementations and facility, among other things, by extending Guava AbstractService class. Both of these HBase classes were marked LimitedPrivate for REPLICATION so these classes were semi-public and made it so Guava 0.12 was part of our API. Having Guava in our API was a mistake. It anchors us and the implementation of the Interface to Guava 0.12. This is untenable given Guava changes and that the Service Interface in particular has had extensive revamp and improvement done. We can't hold to the Guava Interface. It changed. We can't stay on Guava 0.12; implementors and others on our CLASSPATH won't abide being stuck on an old Guava. So this class makes breaking changes. The unhitching of our Interface from Guava could only be done in a breaking manner. It undoes the LimitedPrivate on BaseReplicationEndpoint while keeping it for the RE Interface. It means consumers will have to copy/paste the AbstractService-based BRE into their own codebase also supplying their own Guava; HBase no longer 'supplies' this (our Guava usage has been internalized, relocated). This patch then adds into RE the basic methods RE needs of the old Guava Service rather than return a Service to start/stop only to go back to the RE instance to do actual work. A few method names had to be changed so we could make implementations with Guava Service internally and not have RE method names and types clash. Semantics remained the same otherwise. For example startAsync and stopAsync in Guava are start and stop in RE.
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ec7bca17 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ec7bca17 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ec7bca17 Branch: refs/heads/branch-2 Commit: ec7bca1769dcd825289e485c331e716df8ee33a1 Parents: 1ae9a39 Author: Michael StackAuthored: Tue Aug 8 21:55:47 2017 +0800 Committer: Michael Stack Committed: Thu Aug 24 08:06:09 2017 -0700 -- .../replication/BaseReplicationEndpoint.java| 16 ++-- .../replication/HBaseReplicationEndpoint.java | 10 +++ .../hbase/replication/ReplicationEndpoint.java | 88 +++- .../regionserver/ReplicationSource.java | 38 - .../VisibilityReplicationEndpoint.java | 40 - .../TestReplicationAdminWithClusters.java | 10 +++ .../replication/TestReplicationEndpoint.java| 10 +++ .../replication/TestReplicationSource.java | 2 +- 8 files changed, 160 insertions(+), 54 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/ec7bca17/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java index ae4e7cc..5b9cef7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java @@ -24,15 +24,16 @@ import java.util.ArrayList; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.AbstractService; + /** - * A Base implementation for {@link 
ReplicationEndpoint}s. Users should consider extending this - * class rather than implementing {@link ReplicationEndpoint} directly for better backwards - * compatibility. + * A Base implementation for {@link ReplicationEndpoint}s. For internal use. Uses our internal + * Guava. */ -@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.REPLICATION) +// This class has been made InterfaceAudience.Private in 2.0.0. It used to be +// LimitedPrivate. See HBASE-15982. +@InterfaceAudience.Private public abstract class BaseReplicationEndpoint extends AbstractService implements ReplicationEndpoint { @@ -109,4 +110,9 @@ public abstract class BaseReplicationEndpoint extends AbstractService public boolean canReplicateToSameCluster() { return false; } + + @Override + public boolean isStarting()
hbase git commit: HBASE-15982 Interface ReplicationEndpoint extends Guava's Service
Repository: hbase Updated Branches: refs/heads/master d12eb7a4a -> 6e7baa07f HBASE-15982 Interface ReplicationEndpoint extends Guava's Service Breaking change to our ReplicationEndpoint and BaseReplicationEndpoint. ReplicationEndpoint implemented Guava 0.12 Service. An abstract subclass, BaseReplicationEndpoint, provided default implementations and facility, among other things, by extending Guava AbstractService class. Both of these HBase classes were marked LimitedPrivate for REPLICATION so these classes were semi-public and made it so Guava 0.12 was part of our API. Having Guava in our API was a mistake. It anchors us and the implementation of the Interface to Guava 0.12. This is untenable given Guava changes and that the Service Interface in particular has had extensive revamp and improvement done. We can't hold to the Guava Interface. It changed. We can't stay on Guava 0.12; implementors and others on our CLASSPATH won't abide being stuck on an old Guava. So this class makes breaking changes. The unhitching of our Interface from Guava could only be done in a breaking manner. It undoes the LimitedPrivate on BaseReplicationEndpoint while keeping it for the RE Interface. It means consumers will have to copy/paste the AbstractService-based BRE into their own codebase also supplying their own Guava; HBase no longer 'supplies' this (our Guava usage has been internalized, relocated). This patch then adds into RE the basic methods RE needs of the old Guava Service rather than return a Service to start/stop only to go back to the RE instance to do actual work. A few method names had to be changed so we could make implementations with Guava Service internally and not have RE method names and types clash. Semantics remained the same otherwise. For example startAsync and stopAsync in Guava are start and stop in RE.
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6e7baa07 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6e7baa07 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6e7baa07 Branch: refs/heads/master Commit: 6e7baa07f0b1f5841379545acaf23d36f50de2c2 Parents: d12eb7a Author: Michael StackAuthored: Tue Aug 8 21:55:47 2017 +0800 Committer: Michael Stack Committed: Thu Aug 24 08:05:27 2017 -0700 -- .../replication/BaseReplicationEndpoint.java| 16 ++-- .../replication/HBaseReplicationEndpoint.java | 10 +++ .../hbase/replication/ReplicationEndpoint.java | 88 +++- .../regionserver/ReplicationSource.java | 38 - .../VisibilityReplicationEndpoint.java | 40 - .../TestReplicationAdminWithClusters.java | 10 +++ .../replication/TestReplicationEndpoint.java| 10 +++ .../replication/TestReplicationSource.java | 2 +- 8 files changed, 160 insertions(+), 54 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/6e7baa07/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java index ae4e7cc..5b9cef7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java @@ -24,15 +24,16 @@ import java.util.ArrayList; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.AbstractService; + /** - * A Base implementation for {@link 
ReplicationEndpoint}s. Users should consider extending this - * class rather than implementing {@link ReplicationEndpoint} directly for better backwards - * compatibility. + * A Base implementation for {@link ReplicationEndpoint}s. For internal use. Uses our internal + * Guava. */ -@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.REPLICATION) +// This class has been made InterfaceAudience.Private in 2.0.0. It used to be +// LimitedPrivate. See HBASE-15982. +@InterfaceAudience.Private public abstract class BaseReplicationEndpoint extends AbstractService implements ReplicationEndpoint { @@ -109,4 +110,9 @@ public abstract class BaseReplicationEndpoint extends AbstractService public boolean canReplicateToSameCluster() { return false; } + + @Override + public boolean isStarting() { +
hbase git commit: HBASE-18347 Implement a BufferedMutator for async client
Repository: hbase Updated Branches: refs/heads/branch-2 45b20da23 -> 1ae9a3901 HBASE-18347 Implement a BufferedMutator for async client Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1ae9a390 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1ae9a390 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1ae9a390 Branch: refs/heads/branch-2 Commit: 1ae9a39011f2ee2dfe013a32a990cff34f6ea428 Parents: 45b20da Author: zhangduoAuthored: Mon Aug 21 18:37:26 2017 +0800 Committer: zhangduo Committed: Thu Aug 24 16:01:30 2017 +0800 -- .../hbase/client/AsyncBufferedMutator.java | 84 +++ .../client/AsyncBufferedMutatorBuilder.java | 85 +++ .../client/AsyncBufferedMutatorBuilderImpl.java | 85 +++ .../hbase/client/AsyncBufferedMutatorImpl.java | 144 +++ .../hadoop/hbase/client/AsyncConnection.java| 39 + .../client/AsyncConnectionConfiguration.java| 9 ++ .../hbase/client/AsyncConnectionImpl.java | 11 ++ .../hbase/client/TestAsyncBufferMutator.java| 128 + 8 files changed, 585 insertions(+) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/1ae9a390/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncBufferedMutator.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncBufferedMutator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncBufferedMutator.java new file mode 100644 index 000..ad9279b --- /dev/null +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncBufferedMutator.java @@ -0,0 +1,84 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.client; + +import java.io.Closeable; +import java.util.List; +import java.util.concurrent.CompletableFuture; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.classification.InterfaceAudience; + +/** + * Used to communicate with a single HBase table in batches. Obtain an instance from a + * {@link AsyncConnection} and call {@link #close()} afterwards. + * + * The implementation is required to be thread safe. + */ +@InterfaceAudience.Public +public interface AsyncBufferedMutator extends Closeable { + + /** + * Gets the fully qualified table name instance of the table that this + * {@code AsyncBufferedMutator} writes to. + */ + TableName getName(); + + /** + * Returns the {@link org.apache.hadoop.conf.Configuration} object used by this instance. + * + * The reference returned is not a copy, so any change made to it will affect this instance. + */ + Configuration getConfiguration(); + + /** + * Sends a {@link Mutation} to the table. The mutations will be buffered and sent over the wire as + * part of a batch. Currently only supports {@link Put} and {@link Delete} mutations. + * @param mutation The data to send. + */ + CompletableFuture mutate(Mutation mutation); + + /** + * Send some {@link Mutation}s to the table. The mutations will be buffered and sent over the wire + * as part of a batch. 
There is no guarantee of sending entire content of {@code mutations} in a + * single batch, the implementations are free to break it up according to the write buffer + * capacity. + * @param mutations The data to send. + */ + List mutate(List mutations); + + /** + * Executes all the buffered, asynchronous operations. + */ + void flush(); + + /** + * Performs a {@link #flush()} and releases any resources held. + */ + @Override + void close(); + + /** + * Returns the maximum size in bytes of the write buffer. + * + * The default value comes from the configuration parameter {@code hbase.client.write.buffer}. + * @return The size of the write buffer in bytes. + */ + long getWriteBufferSize(); +}
hbase git commit: HBASE-18347 Implement a BufferedMutator for async client
Repository: hbase Updated Branches: refs/heads/master 25ee5f7f8 -> d12eb7a4a HBASE-18347 Implement a BufferedMutator for async client Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d12eb7a4 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d12eb7a4 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d12eb7a4 Branch: refs/heads/master Commit: d12eb7a4aae5c2dc7b230bf2a12d2313b93b8ba9 Parents: 25ee5f7 Author: zhangduoAuthored: Mon Aug 21 18:37:26 2017 +0800 Committer: zhangduo Committed: Thu Aug 24 15:55:43 2017 +0800 -- .../hbase/client/AsyncBufferedMutator.java | 84 +++ .../client/AsyncBufferedMutatorBuilder.java | 85 +++ .../client/AsyncBufferedMutatorBuilderImpl.java | 85 +++ .../hbase/client/AsyncBufferedMutatorImpl.java | 144 +++ .../hadoop/hbase/client/AsyncConnection.java| 39 + .../client/AsyncConnectionConfiguration.java| 9 ++ .../hbase/client/AsyncConnectionImpl.java | 11 ++ .../hbase/client/TestAsyncBufferMutator.java| 128 + 8 files changed, 585 insertions(+) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/d12eb7a4/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncBufferedMutator.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncBufferedMutator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncBufferedMutator.java new file mode 100644 index 000..ad9279b --- /dev/null +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncBufferedMutator.java @@ -0,0 +1,84 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.client; + +import java.io.Closeable; +import java.util.List; +import java.util.concurrent.CompletableFuture; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.classification.InterfaceAudience; + +/** + * Used to communicate with a single HBase table in batches. Obtain an instance from a + * {@link AsyncConnection} and call {@link #close()} afterwards. + * + * The implementation is required to be thread safe. + */ +@InterfaceAudience.Public +public interface AsyncBufferedMutator extends Closeable { + + /** + * Gets the fully qualified table name instance of the table that this + * {@code AsyncBufferedMutator} writes to. + */ + TableName getName(); + + /** + * Returns the {@link org.apache.hadoop.conf.Configuration} object used by this instance. + * + * The reference returned is not a copy, so any change made to it will affect this instance. + */ + Configuration getConfiguration(); + + /** + * Sends a {@link Mutation} to the table. The mutations will be buffered and sent over the wire as + * part of a batch. Currently only supports {@link Put} and {@link Delete} mutations. + * @param mutation The data to send. + */ + CompletableFuture mutate(Mutation mutation); + + /** + * Send some {@link Mutation}s to the table. The mutations will be buffered and sent over the wire + * as part of a batch. 
There is no guarantee of sending entire content of {@code mutations} in a + * single batch, the implementations are free to break it up according to the write buffer + * capacity. + * @param mutations The data to send. + */ + List mutate(List mutations); + + /** + * Executes all the buffered, asynchronous operations. + */ + void flush(); + + /** + * Performs a {@link #flush()} and releases any resources held. + */ + @Override + void close(); + + /** + * Returns the maximum size in bytes of the write buffer. + * + * The default value comes from the configuration parameter {@code hbase.client.write.buffer}. + * @return The size of the write buffer in bytes. + */ + long getWriteBufferSize(); +}
hbase git commit: HBASE-18546 Always overwrite the TS for Append/Increment unless no existing cells are found
Repository: hbase Updated Branches: refs/heads/branch-2 8c84793d2 -> 45b20da23 HBASE-18546 Always overwrite the TS for Append/Increment unless no existing cells are found Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/45b20da2 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/45b20da2 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/45b20da2 Branch: refs/heads/branch-2 Commit: 45b20da23bde17ed39f9d2c930ced063ef105ccc Parents: 8c84793 Author: Chia-Ping TsaiAuthored: Thu Aug 24 14:23:36 2017 +0800 Committer: Chia-Ping Tsai Committed: Thu Aug 24 14:40:44 2017 +0800 -- .../hadoop/hbase/protobuf/ProtobufUtil.java | 201 ++- .../hbase/shaded/protobuf/ProtobufUtil.java | 198 ++ .../hbase/shaded/protobuf/RequestConverter.java | 10 +- .../hadoop/hbase/regionserver/HRegion.java | 99 +++-- .../hbase/client/TestAppendFromClientSide.java | 85 .../client/TestIncrementsFromClientSide.java| 23 +++ .../hadoop/hbase/protobuf/TestProtobufUtil.java | 8 +- .../hbase/shaded/protobuf/TestProtobufUtil.java | 86 8 files changed, 354 insertions(+), 356 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/45b20da2/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java index 6f9238a..43813ea 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java @@ -27,6 +27,7 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.NavigableSet; +import java.util.function.Function; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; @@ -617,83 +618,75 @@ public final class ProtobufUtil { return delete; } - /** - * Convert a protocol 
buffer Mutate to an Append - * @param cellScanner - * @param proto the protocol buffer Mutate to convert - * @return the converted client Append - * @throws IOException - */ - public static Append toAppend(final MutationProto proto, final CellScanner cellScanner) - throws IOException { -MutationType type = proto.getMutateType(); -assert type == MutationType.APPEND : type.name(); -byte [] row = proto.hasRow()? proto.getRow().toByteArray(): null; -Append append = null; -int cellCount = proto.hasAssociatedCellCount()? proto.getAssociatedCellCount(): 0; + @FunctionalInterface + private interface ConsumerWithException { +void accept(T t, U u) throws IOException; + } + + private static T toDelta(Function supplier, ConsumerWithException consumer, + final MutationProto proto, final CellScanner cellScanner) throws IOException { +byte[] row = proto.hasRow() ? proto.getRow().toByteArray() : null; +T mutation = row == null ? null : supplier.apply(new Bytes(row)); +int cellCount = proto.hasAssociatedCellCount() ? proto.getAssociatedCellCount() : 0; if (cellCount > 0) { // The proto has metadata only and the data is separate to be found in the cellScanner. 
if (cellScanner == null) { throw new DoNotRetryIOException("Cell count of " + cellCount + " but no cellScanner: " + - toShortString(proto)); +toShortString(proto)); } for (int i = 0; i < cellCount; i++) { if (!cellScanner.advance()) { throw new DoNotRetryIOException("Cell count of " + cellCount + " but at index " + i + -" no cell returned: " + toShortString(proto)); + " no cell returned: " + toShortString(proto)); } Cell cell = cellScanner.current(); -if (append == null) { - append = new Append(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()); +if (mutation == null) { + mutation = supplier.apply(new Bytes(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength())); } -append.add(cell); +consumer.accept(mutation, cell); } } else { - append = new Append(row); - for (ColumnValue column: proto.getColumnValueList()) { + if (mutation == null) { +throw new IllegalArgumentException("row cannot be null"); + } + for (ColumnValue column : proto.getColumnValueList()) { byte[] family = column.getFamily().toByteArray(); -for (QualifierValue qv: column.getQualifierValueList()) { +for
hbase git commit: HBASE-18546 Always overwrite the TS for Append/Increment unless no existing cells are found
Repository: hbase Updated Branches: refs/heads/master 3b444a066 -> 25ee5f7f8 HBASE-18546 Always overwrite the TS for Append/Increment unless no existing cells are found Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/25ee5f7f Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/25ee5f7f Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/25ee5f7f Branch: refs/heads/master Commit: 25ee5f7f8406b358aa0e7ac59ed661fef82183b8 Parents: 3b444a0 Author: Chia-Ping TsaiAuthored: Thu Aug 24 14:23:36 2017 +0800 Committer: Chia-Ping Tsai Committed: Thu Aug 24 14:35:22 2017 +0800 -- .../hadoop/hbase/protobuf/ProtobufUtil.java | 201 ++- .../hbase/shaded/protobuf/ProtobufUtil.java | 198 ++ .../hbase/shaded/protobuf/RequestConverter.java | 10 +- .../hadoop/hbase/regionserver/HRegion.java | 99 +++-- .../hbase/client/TestAppendFromClientSide.java | 85 .../client/TestIncrementsFromClientSide.java| 23 +++ .../hadoop/hbase/protobuf/TestProtobufUtil.java | 8 +- .../hbase/shaded/protobuf/TestProtobufUtil.java | 86 8 files changed, 354 insertions(+), 356 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/25ee5f7f/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java index 01ba0e0..79a874e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java @@ -27,6 +27,7 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.NavigableSet; +import java.util.function.Function; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; @@ -616,83 +617,75 @@ public final class ProtobufUtil { return delete; } - /** - * Convert a protocol buffer 
Mutate to an Append - * @param cellScanner - * @param proto the protocol buffer Mutate to convert - * @return the converted client Append - * @throws IOException - */ - public static Append toAppend(final MutationProto proto, final CellScanner cellScanner) - throws IOException { -MutationType type = proto.getMutateType(); -assert type == MutationType.APPEND : type.name(); -byte [] row = proto.hasRow()? proto.getRow().toByteArray(): null; -Append append = null; -int cellCount = proto.hasAssociatedCellCount()? proto.getAssociatedCellCount(): 0; + @FunctionalInterface + private interface ConsumerWithException { +void accept(T t, U u) throws IOException; + } + + private static T toDelta(Function supplier, ConsumerWithException consumer, + final MutationProto proto, final CellScanner cellScanner) throws IOException { +byte[] row = proto.hasRow() ? proto.getRow().toByteArray() : null; +T mutation = row == null ? null : supplier.apply(new Bytes(row)); +int cellCount = proto.hasAssociatedCellCount() ? proto.getAssociatedCellCount() : 0; if (cellCount > 0) { // The proto has metadata only and the data is separate to be found in the cellScanner. 
if (cellScanner == null) { throw new DoNotRetryIOException("Cell count of " + cellCount + " but no cellScanner: " + - toShortString(proto)); +toShortString(proto)); } for (int i = 0; i < cellCount; i++) { if (!cellScanner.advance()) { throw new DoNotRetryIOException("Cell count of " + cellCount + " but at index " + i + -" no cell returned: " + toShortString(proto)); + " no cell returned: " + toShortString(proto)); } Cell cell = cellScanner.current(); -if (append == null) { - append = new Append(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()); +if (mutation == null) { + mutation = supplier.apply(new Bytes(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength())); } -append.add(cell); +consumer.accept(mutation, cell); } } else { - append = new Append(row); - for (ColumnValue column: proto.getColumnValueList()) { + if (mutation == null) { +throw new IllegalArgumentException("row cannot be null"); + } + for (ColumnValue column : proto.getColumnValueList()) { byte[] family = column.getFamily().toByteArray(); -for (QualifierValue qv: column.getQualifierValueList()) { +for (QualifierValue
hbase git commit: HBASE-18224 Upgrade Jetty
Repository: hbase Updated Branches: refs/heads/branch-2 e2ce252b5 -> 8c84793d2 HBASE-18224 Upgrade Jetty Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8c84793d Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8c84793d Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8c84793d Branch: refs/heads/branch-2 Commit: 8c84793d2214239a3e49b4582df1c2869ef32378 Parents: e2ce252 Author: Michael StackAuthored: Tue Aug 22 13:33:08 2017 -0700 Committer: Michael Stack Committed: Wed Aug 23 23:14:13 2017 -0700 -- hbase-common/src/main/resources/hbase-default.xml | 2 +- .../apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java| 4 ++-- .../test/java/org/apache/hadoop/hbase/http/TestHttpServer.java | 5 +++-- .../java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java| 2 +- .../java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java | 4 ++-- .../org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java| 4 ++-- pom.xml | 2 +- 7 files changed, 12 insertions(+), 11 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/8c84793d/hbase-common/src/main/resources/hbase-default.xml -- diff --git a/hbase-common/src/main/resources/hbase-default.xml b/hbase-common/src/main/resources/hbase-default.xml index d7944f6..01c4c58 100644 --- a/hbase-common/src/main/resources/hbase-default.xml +++ b/hbase-common/src/main/resources/hbase-default.xml @@ -1602,7 +1602,7 @@ possible configurations would overwhelm and obscure the important. hbase.http.max.threads -10 +16 The maximum number of threads that the HTTP Server will create in its ThreadPool. 
http://git-wip-us.apache.org/repos/asf/hbase/blob/8c84793d/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java index 680a902..185df41 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java @@ -97,12 +97,12 @@ public class FavoredNodeLoadBalancer extends BaseLoadBalancer implements Favored LOG.warn("Not running balancer since exception was thrown " + ie); return plans; } -Map serverNameToServerNameWithoutCode = new HashMap<>(); +// This is not used? Findbugs says so: Map serverNameToServerNameWithoutCode = new HashMap<>(); Map serverNameWithoutCodeToServerName = new HashMap<>(); ServerManager serverMgr = super.services.getServerManager(); for (ServerName sn: serverMgr.getOnlineServersList()) { ServerName s = ServerName.valueOf(sn.getHostname(), sn.getPort(), ServerName.NON_STARTCODE); - serverNameToServerNameWithoutCode.put(sn, s); + // FindBugs complains about useless store! 
serverNameToServerNameWithoutCode.put(sn, s); serverNameWithoutCodeToServerName.put(s, sn); } for (Map.Entry entry : clusterState.entrySet()) { http://git-wip-us.apache.org/repos/asf/hbase/blob/8c84793d/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java -- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java index 31b5b8d..e9a56ff 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java @@ -74,7 +74,8 @@ public class TestHttpServer extends HttpServerFunctionalTest { private static final Log LOG = LogFactory.getLog(TestHttpServer.class); private static HttpServer server; private static URL baseUrl; - private static final int MAX_THREADS = 10; + // jetty 9.4.x needs this many threads to start, even in the small. + static final int MAX_THREADS = 16; @SuppressWarnings("serial") public static class EchoMapServlet extends HttpServlet { @@ -150,7 +151,7 @@ public class TestHttpServer extends HttpServerFunctionalTest { @BeforeClass public static void setup() throws Exception { Configuration conf = new Configuration(); -conf.setInt(HttpServer.HTTP_MAX_THREADS, 10); +
hbase git commit: HBASE-18224 Upgrade Jetty
Repository: hbase Updated Branches: refs/heads/master 25ff9d0bb -> 3b444a066 HBASE-18224 Upgrade Jetty Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3b444a06 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3b444a06 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3b444a06 Branch: refs/heads/master Commit: 3b444a066c0c699aff749713209950198f1b21e4 Parents: 25ff9d0 Author: Michael StackAuthored: Tue Aug 22 13:33:08 2017 -0700 Committer: Michael Stack Committed: Wed Aug 23 23:12:59 2017 -0700 -- hbase-common/src/main/resources/hbase-default.xml | 2 +- .../apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java| 4 ++-- .../test/java/org/apache/hadoop/hbase/http/TestHttpServer.java | 5 +++-- .../java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java| 2 +- .../java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java | 4 ++-- .../org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java| 4 ++-- pom.xml | 2 +- 7 files changed, 12 insertions(+), 11 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/3b444a06/hbase-common/src/main/resources/hbase-default.xml -- diff --git a/hbase-common/src/main/resources/hbase-default.xml b/hbase-common/src/main/resources/hbase-default.xml index 43efd4b..6b8849e 100644 --- a/hbase-common/src/main/resources/hbase-default.xml +++ b/hbase-common/src/main/resources/hbase-default.xml @@ -1603,7 +1603,7 @@ possible configurations would overwhelm and obscure the important. hbase.http.max.threads -10 +16 The maximum number of threads that the HTTP Server will create in its ThreadPool. 
http://git-wip-us.apache.org/repos/asf/hbase/blob/3b444a06/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java index 680a902..185df41 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java @@ -97,12 +97,12 @@ public class FavoredNodeLoadBalancer extends BaseLoadBalancer implements Favored LOG.warn("Not running balancer since exception was thrown " + ie); return plans; } -Map serverNameToServerNameWithoutCode = new HashMap<>(); +// This is not used? Findbugs says so: Map serverNameToServerNameWithoutCode = new HashMap<>(); Map serverNameWithoutCodeToServerName = new HashMap<>(); ServerManager serverMgr = super.services.getServerManager(); for (ServerName sn: serverMgr.getOnlineServersList()) { ServerName s = ServerName.valueOf(sn.getHostname(), sn.getPort(), ServerName.NON_STARTCODE); - serverNameToServerNameWithoutCode.put(sn, s); + // FindBugs complains about useless store! 
serverNameToServerNameWithoutCode.put(sn, s); serverNameWithoutCodeToServerName.put(s, sn); } for (Map.Entry entry : clusterState.entrySet()) { http://git-wip-us.apache.org/repos/asf/hbase/blob/3b444a06/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java -- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java index 31b5b8d..e9a56ff 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java @@ -74,7 +74,8 @@ public class TestHttpServer extends HttpServerFunctionalTest { private static final Log LOG = LogFactory.getLog(TestHttpServer.class); private static HttpServer server; private static URL baseUrl; - private static final int MAX_THREADS = 10; + // jetty 9.4.x needs this many threads to start, even in the small. + static final int MAX_THREADS = 16; @SuppressWarnings("serial") public static class EchoMapServlet extends HttpServlet { @@ -150,7 +151,7 @@ public class TestHttpServer extends HttpServerFunctionalTest { @BeforeClass public static void setup() throws Exception { Configuration conf = new Configuration(); -conf.setInt(HttpServer.HTTP_MAX_THREADS, 10); +