[14/22] hbase-site git commit: Published site at .

2017-08-31 (git-site-role)
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6f0c8299/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html
index df7d03c..59d8ee8 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html
@@ -40,994 +40,1004 @@
 032import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
 033import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
 034import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
-035
-036import java.io.IOException;
-037import java.net.URI;
-038import java.util.List;
-039
-040import org.apache.commons.cli.CommandLine;
-041import org.apache.commons.cli.HelpFormatter;
-042import org.apache.commons.cli.Options;
-043import org.apache.commons.lang.StringUtils;
-044import org.apache.hadoop.conf.Configuration;
-045import org.apache.hadoop.conf.Configured;
-046import org.apache.hadoop.fs.FileSystem;
-047import org.apache.hadoop.fs.Path;
-048import org.apache.hadoop.hbase.HBaseConfiguration;
-049import org.apache.hadoop.hbase.TableName;
-050import org.apache.hadoop.hbase.backup.BackupAdmin;
-051import org.apache.hadoop.hbase.backup.BackupInfo;
-052import org.apache.hadoop.hbase.backup.BackupInfo.BackupState;
-053import org.apache.hadoop.hbase.backup.BackupRequest;
-054import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
-055import org.apache.hadoop.hbase.backup.BackupRestoreConstants.BackupCommand;
-056import org.apache.hadoop.hbase.backup.BackupType;
-057import org.apache.hadoop.hbase.backup.util.BackupSet;
-058import org.apache.hadoop.hbase.backup.util.BackupUtils;
-059import org.apache.hadoop.hbase.classification.InterfaceAudience;
-060import org.apache.hadoop.hbase.client.Connection;
-061import org.apache.hadoop.hbase.client.ConnectionFactory;
-062import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-063import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-064
-065/**
-066 * General backup commands, options and usage messages
-067 */
-068
-069@InterfaceAudience.Private
-070public final class BackupCommands {
-071
-072  public final static String INCORRECT_USAGE = "Incorrect usage";
+035import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
+036import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
+037
+038import java.io.IOException;
+039import java.net.URI;
+040import java.util.List;
+041
+042import org.apache.commons.cli.CommandLine;
+043import org.apache.commons.cli.HelpFormatter;
+044import org.apache.commons.cli.Options;
+045import org.apache.commons.lang.StringUtils;
+046import org.apache.hadoop.conf.Configuration;
+047import org.apache.hadoop.conf.Configured;
+048import org.apache.hadoop.fs.FileSystem;
+049import org.apache.hadoop.fs.Path;
+050import org.apache.hadoop.hbase.HBaseConfiguration;
+051import org.apache.hadoop.hbase.TableName;
+052import org.apache.hadoop.hbase.backup.BackupAdmin;
+053import org.apache.hadoop.hbase.backup.BackupInfo;
+054import org.apache.hadoop.hbase.backup.BackupInfo.BackupState;
+055import org.apache.hadoop.hbase.backup.BackupRequest;
+056import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
+057import org.apache.hadoop.hbase.backup.BackupRestoreConstants.BackupCommand;
+058import org.apache.hadoop.hbase.backup.BackupType;
+059import org.apache.hadoop.hbase.backup.util.BackupSet;
+060import org.apache.hadoop.hbase.backup.util.BackupUtils;
+061import org.apache.hadoop.hbase.classification.InterfaceAudience;
+062import org.apache.hadoop.hbase.client.Connection;
+063import org.apache.hadoop.hbase.client.ConnectionFactory;
+064import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+065import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+066
+067/**
+068 * General backup commands, options and usage messages
+069 */
+070
+071@InterfaceAudience.Private
+072public final class BackupCommands {
 073
-074  public static final String USAGE = "Usage: hbase backup COMMAND [command-specific arguments]\n"
-075      + "where COMMAND is one of:\n" + "  create     create a new backup image\n"
-076      + "  delete     delete an existing backup image\n"
-077      + "  describe   show the detailed information of a backup image\n"
-078      + "  history    show history of all successful backups\n"
-079      + "  progress   show the progress of the latest backup request\n"
-080      + "  set        backup set management\n"
-081      + "  repair     repair backup system tab

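The usage text above documents the CLI entry points; the imports added in this hunk (BackupAdmin, BackupRequest, ConnectionFactory, plus the new OPTION_YARN_QUEUE_NAME constants) also back a programmatic API. A minimal sketch of a full backup through that API follows; the table name and target directory are hypothetical placeholders, and BackupAdminImpl is assumed to be the impl-package implementation of BackupAdmin.

import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.backup.BackupAdmin;
import org.apache.hadoop.hbase.backup.BackupRequest;
import org.apache.hadoop.hbase.backup.BackupType;
import org.apache.hadoop.hbase.backup.impl.BackupAdminImpl;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;

public class FullBackupSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
         BackupAdmin admin = new BackupAdminImpl(conn)) {
      // Back up one table; a FULL image is the baseline for later INCREMENTAL ones.
      List<TableName> tables = Lists.newArrayList(TableName.valueOf("usertable"));
      BackupRequest request = new BackupRequest.Builder()
          .withBackupType(BackupType.FULL)
          .withTableList(tables)
          .withTargetRootDir("hdfs://backup-ns/hbase-backups") // placeholder path
          .build();
      String backupId = admin.backupTables(request); // blocks until the image is taken
      System.out.println("created backup " + backupId);
    }
  }
}
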
[14/22] hbase-site git commit: Published site at .

2017-11-22 (git-site-role)
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/34a201e5/team-list.html
--
diff --git a/team-list.html b/team-list.html
index 68f9d7c..99f1874 100644
--- a/team-list.html
+++ b/team-list.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Project Team
 
@@ -717,7 +717,7 @@
 https://www.apache.org/ The Apache Software Foundation.
 All rights reserved.  
 
-      Last Published: 2017-11-21
+      Last Published: 2017-11-22
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/34a201e5/testdevapidocs/allclasses-frame.html
--
diff --git a/testdevapidocs/allclasses-frame.html b/testdevapidocs/allclasses-frame.html
index 5bf365d..9b62b4e 100644
--- a/testdevapidocs/allclasses-frame.html
+++ b/testdevapidocs/allclasses-frame.html
@@ -30,6 +30,10 @@
 AbstractTestWALReplay
 AbstractTestWALReplay.CustomStoreFlusher
 AbstractTestWALReplay.MockWAL
+AcidGuaranteesTestTool
+AcidGuaranteesTestTool.AtomicGetReader
+AcidGuaranteesTestTool.AtomicityWriter
+AcidGuaranteesTestTool.AtomicScanReader
 Action
 Action.ActionContext
 AddColumnAction
@@ -528,9 +532,6 @@
 TestAccessController3
 TestAccessController3.FaultyAccessController
 TestAcidGuarantees
-TestAcidGuarantees.AtomicGetReader
-TestAcidGuarantees.AtomicityWriter
-TestAcidGuarantees.AtomicScanReader
 TestActiveMasterManager
 TestActiveMasterManager.DummyMaster
 TestActiveMasterManager.NodeDeletionListener

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/34a201e5/testdevapidocs/allclasses-noframe.html
--
diff --git a/testdevapidocs/allclasses-noframe.html b/testdevapidocs/allclasses-noframe.html
index 320ee88..eb7f321 100644
--- a/testdevapidocs/allclasses-noframe.html
+++ b/testdevapidocs/allclasses-noframe.html
@@ -30,6 +30,10 @@
 AbstractTestWALReplay
 AbstractTestWALReplay.CustomStoreFlusher
 AbstractTestWALReplay.MockWAL
+AcidGuaranteesTestTool
+AcidGuaranteesTestTool.AtomicGetReader
+AcidGuaranteesTestTool.AtomicityWriter
+AcidGuaranteesTestTool.AtomicScanReader
 Action
 Action.ActionContext
 AddColumnAction
@@ -528,9 +532,6 @@
 TestAccessController3
 TestAccessController3.FaultyAccessController
 TestAcidGuarantees
-TestAcidGuarantees.AtomicGetReader
-TestAcidGuarantees.AtomicityWriter
-TestAcidGuarantees.AtomicScanReader
 TestActiveMasterManager
 TestActiveMasterManager.DummyMaster
 TestActiveMasterManager.NodeDeletionListener



[14/22] hbase-site git commit: Published site at 46cb5dfa226892fd2580f26ce9ce77225bd7e67c.

2018-04-21 (git-site-role)
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bc57a1a/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.HBaseHandler.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.HBaseHandler.html b/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.HBaseHandler.html
index aeebaec..f338c1a 100644
--- a/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.HBaseHandler.html
+++ b/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.HBaseHandler.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static class ThriftServerRunner.HBaseHandler
+public static class ThriftServerRunner.HBaseHandler
 extends Object
 implements org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
 The HBaseHandler is a glue object that connects Thrift RPC calls to the
@@ -721,7 +721,7 @@ implements org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
 
 
 conf
-protected org.apache.hadoop.conf.Configuration conf
+protected org.apache.hadoop.conf.Configuration conf
 
 
 
@@ -730,7 +730,7 @@ implements org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
 
 
 LOG
-protected static final org.slf4j.Logger LOG
+protected static final org.slf4j.Logger LOG
 
 
 
@@ -739,7 +739,7 @@ implements org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
 
 
 nextScannerId
-protected int nextScannerId
+protected int nextScannerId
 
 
 
@@ -748,7 +748,7 @@ implements org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
 
 
 scannerMap
-protected HashMap<Integer,ThriftServerRunner.ResultScannerWrapper> scannerMap
+protected HashMap<Integer,ThriftServerRunner.ResultScannerWrapper> scannerMap
 
 
 
@@ -757,7 +757,7 @@ implements org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
 
 
 metrics
-private ThriftMetrics metrics
+private ThriftMetrics metrics
 
 
 
@@ -766,7 +766,7 @@ implements org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
 
 
 connectionCache
-private final ConnectionCache connectionCache
+private final ConnectionCache connectionCache
 
 
 
@@ -775,7 +775,7 @@ implements org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
 
 
 coalescer
-IncrementCoalescer coalescer
+IncrementCoalescer coalescer
 
 
 
@@ -784,7 +784,7 @@ implements org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
 
 
 CLEANUP_INTERVAL
-static final String CLEANUP_INTERVAL
+static final String CLEANUP_INTERVAL
 
 See Also:
 Constant Field Values
@@ -797,7 +797,7 @@ implements org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
 
 
 MAX_IDLETIME
-static final String MAX_IDLETIME
+static final String MAX_IDLETIME
 
 See Also:
 Constant Field Values
@@ -818,7 +818,7 @@ implements org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
 
 
 HBaseHandler
-protected HBaseHandler(org.apache.hadoop.conf.Configuration c,
+protected HBaseHandler(org.apache.hadoop.conf.Configuration c,
                        UserProvider userProvider)
                 throws IOException
 
@@ -841,7 +841,7 @@ implements org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
 
 
 getAllColumns
-byte[][] getAllColumns(Table table)
+byte[][] getAllColumns(Table table)
                 throws IOException
 Returns a list of all the column families for a given Table.
 
@@ -858,7 +858,7 @@ implements org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
 
 
 getTable
-public Table getTable(byte[] tableName)
+public Table getTable(byte[] tableName)
                throws IOException

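The nextScannerId / scannerMap fields shown in this hunk implement a handle pattern: a live ResultScanner cannot cross the Thrift wire, so the handler parks it server-side and gives the client an integer id to scan with and eventually close. A minimal sketch of that bookkeeping, assuming nothing beyond what the field list shows:

import java.util.HashMap;
import java.util.Map;

class ScannerRegistry<S> {
  private int nextScannerId = 0;             // next handle to hand out
  private final Map<Integer, S> scannerMap = new HashMap<>();

  // Park a scanner server-side and return the id the remote client will use.
  synchronized int addScanner(S scanner) {
    int id = nextScannerId++;
    scannerMap.put(id, scanner);
    return id;
  }

  synchronized S getScanner(int id) {
    return scannerMap.get(id);
  }

  // Drop the mapping when the client closes the scanner, freeing server resources.
  synchronized S removeScanner(int id) {
    return scannerMap.remove(id);
  }
}
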
[14/22] hbase-site git commit: Published site at da5fb27eabed4a4b4d251be973ee945fb52895bf.

2017-04-13 (git-site-role)
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7b1830cf/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html
index 2a09edb..b6fb3b4 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html
@@ -37,131 +37,131 @@
 029import java.util.Comparator;
 030import java.util.LinkedList;
 031import java.util.List;
-032import java.util.Random;
-033
-034import org.apache.commons.cli.CommandLine;
-035import org.apache.commons.cli.Option;
-036import org.apache.commons.logging.Log;
-037import org.apache.commons.logging.LogFactory;
-038import org.apache.hadoop.hbase.classification.InterfaceAudience;
-039import org.apache.hadoop.conf.Configuration;
-040import org.apache.hadoop.fs.FSDataInputStream;
-041import org.apache.hadoop.fs.FSDataOutputStream;
-042import org.apache.hadoop.fs.FileChecksum;
-043import org.apache.hadoop.fs.FileStatus;
-044import org.apache.hadoop.fs.FileSystem;
-045import org.apache.hadoop.fs.FileUtil;
-046import org.apache.hadoop.fs.Path;
-047import org.apache.hadoop.fs.permission.FsPermission;
-048import org.apache.hadoop.hbase.TableName;
-049import org.apache.hadoop.hbase.HBaseConfiguration;
-050import org.apache.hadoop.hbase.HConstants;
-051import org.apache.hadoop.hbase.HRegionInfo;
-052import org.apache.hadoop.hbase.io.FileLink;
-053import org.apache.hadoop.hbase.io.HFileLink;
-054import org.apache.hadoop.hbase.io.WALLink;
-055import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
-056import org.apache.hadoop.hbase.mob.MobUtils;
-057import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
-058import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo;
-059import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
-060import org.apache.hadoop.hbase.util.AbstractHBaseTool;
-061import org.apache.hadoop.hbase.util.FSUtils;
-062import org.apache.hadoop.hbase.util.HFileArchiveUtil;
-063import org.apache.hadoop.hbase.util.Pair;
-064import org.apache.hadoop.io.BytesWritable;
-065import org.apache.hadoop.io.IOUtils;
-066import org.apache.hadoop.io.NullWritable;
-067import org.apache.hadoop.io.Writable;
-068import org.apache.hadoop.mapreduce.Job;
-069import org.apache.hadoop.mapreduce.JobContext;
-070import org.apache.hadoop.mapreduce.Mapper;
-071import org.apache.hadoop.mapreduce.InputFormat;
-072import org.apache.hadoop.mapreduce.InputSplit;
-073import org.apache.hadoop.mapreduce.RecordReader;
-074import org.apache.hadoop.mapreduce.TaskAttemptContext;
-075import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
-076import org.apache.hadoop.mapreduce.security.TokenCache;
-077import org.apache.hadoop.hbase.io.hadoopbackport.ThrottledInputStream;
-078import org.apache.hadoop.util.StringUtils;
-079import org.apache.hadoop.util.Tool;
-080
-081/**
-082 * Export the specified snapshot to a given FileSystem.
-083 *
-084 * The .snapshot/name folder is copied to the destination cluster
-085 * and then all the hfiles/wals are copied using a Map-Reduce Job in the .archive/ location.
-086 * When everything is done, the second cluster can restore the snapshot.
-087 */
-088@InterfaceAudience.Public
-089public class ExportSnapshot extends AbstractHBaseTool implements Tool {
-090  public static final String NAME = "exportsnapshot";
-091  /** Configuration prefix for overrides for the source filesystem */
-092  public static final String CONF_SOURCE_PREFIX = NAME + ".from.";
-093  /** Configuration prefix for overrides for the destination filesystem */
-094  public static final String CONF_DEST_PREFIX = NAME + ".to.";
-095
-096  private static final Log LOG = LogFactory.getLog(ExportSnapshot.class);
-097
-098  private static final String MR_NUM_MAPS = "mapreduce.job.maps";
-099  private static final String CONF_NUM_SPLITS = "snapshot.export.format.splits";
-100  private static final String CONF_SNAPSHOT_NAME = "snapshot.export.format.snapshot.name";
-101  private static final String CONF_SNAPSHOT_DIR = "snapshot.export.format.snapshot.dir";
-102  private static final String CONF_FILES_USER = "snapshot.export.files.attributes.user";
-103  private static final String CONF_FILES_GROUP = "snapshot.export.files.attributes.group";
-104  private static final String CONF_FILES_MODE = "snapshot.export.files.attributes.mode";
-105  private static final String CONF_CHECKSUM_VERIFY = "snapshot.export.checksum.verify";
-106  private static final String CONF_OUTPUT_ROOT = "snapshot.export.output.root";
-107  private static final String CONF_I

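Since ExportSnapshot extends AbstractHBaseTool and implements Tool, it can be run from the hbase command line or driven programmatically through Hadoop's ToolRunner. A hedged sketch of the latter; the snapshot name and destination URI are placeholders:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.snapshot.ExportSnapshot;
import org.apache.hadoop.util.ToolRunner;

public class ExportSnapshotSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // -snapshot names the snapshot to export; -copy-to is the destination
    // cluster's root directory, per the class comment above.
    int rc = ToolRunner.run(conf, new ExportSnapshot(), new String[] {
        "-snapshot", "my_snapshot",
        "-copy-to", "hdfs://dst-cluster:8020/hbase" });
    System.exit(rc);
  }
}
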
[14/22] hbase-site git commit: Published site at ac5bb8155b618194fe9cf1131f0e72c99b7b534c.

2018-06-16 (git-site-role)
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a7f7dba1/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.EntryBuffers.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.EntryBuffers.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.EntryBuffers.html
index 99f53c4..2d5c946 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.EntryBuffers.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.EntryBuffers.html
@@ -123,7 +123,7 @@
 115  public static final boolean SPLIT_SKIP_ERRORS_DEFAULT = false;
 116
 117  // Parameters for split process
-118  protected final Path rootDir;
+118  protected final Path walDir;
 119  protected final FileSystem fs;
 120  protected final Configuration conf;
 121
@@ -156,14 +156,14 @@
 148
 149
 150  @VisibleForTesting
-151  WALSplitter(final WALFactory factory, Configuration conf, Path rootDir,
+151  WALSplitter(final WALFactory factory, Configuration conf, Path walDir,
 152      FileSystem fs, LastSequenceId idChecker,
 153      SplitLogWorkerCoordination splitLogWorkerCoordination) {
 154    this.conf = HBaseConfiguration.create(conf);
 155    String codecClassName = conf
 156        .get(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, WALCellCodec.class.getName());
 157    this.conf.set(HConstants.RPC_CODEC_CONF_KEY, codecClassName);
-158    this.rootDir = rootDir;
+158    this.walDir = walDir;
 159    this.fs = fs;
 160    this.sequenceIdChecker = idChecker;
 161    this.splitLogWorkerCoordination = splitLogWorkerCoordination;
@@ -194,11 +194,11 @@
 186   *
 187   * @return false if it is interrupted by the progress-able.
 188   */
-189  public static boolean splitLogFile(Path rootDir, FileStatus logfile, FileSystem fs,
+189  public static boolean splitLogFile(Path walDir, FileStatus logfile, FileSystem fs,
 190      Configuration conf, CancelableProgressable reporter, LastSequenceId idChecker,
 191      SplitLogWorkerCoordination splitLogWorkerCoordination, final WALFactory factory)
 192      throws IOException {
-193    WALSplitter s = new WALSplitter(factory, conf, rootDir, fs, idChecker,
+193    WALSplitter s = new WALSplitter(factory, conf, walDir, fs, idChecker,
 194        splitLogWorkerCoordination);
 195    return s.splitLogFile(logfile, reporter);
 196  }
@@ -330,10 +330,10 @@
 322        LOG.warn("Could not parse, corrupted WAL={}", logPath, e);
 323        if (splitLogWorkerCoordination != null) {
 324          // Some tests pass in a csm of null.
-325          splitLogWorkerCoordination.markCorrupted(rootDir, logfile.getPath().getName(), fs);
+325          splitLogWorkerCoordination.markCorrupted(walDir, logfile.getPath().getName(), fs);
 326        } else {
 327          // for tests only
-328          ZKSplitLog.markCorrupted(rootDir, logfile.getPath().getName(), fs);
+328          ZKSplitLog.markCorrupted(walDir, logfile.getPath().getName(), fs);
 329        }
 330        isCorrupted = true;
 331      } catch (IOException e) {
@@ -465,1437 +465,1442 @@
 457   * logEntry: e.g. /hbase/some_table/2323432434/recovered.edits/2332.
 458   * This method also ensures existence of RECOVERED_EDITS_DIR under the region
 459   * creating it if necessary.
-460   * @param fs
-461   * @param logEntry
-462   * @param rootDir HBase root dir.
-463   * @param fileNameBeingSplit the file being split currently. Used to generate tmp file name.
-464   * @return Path to file into which to dump split log edits.
-465   * @throws IOException
-466   */
-467  @SuppressWarnings("deprecation")
-468  @VisibleForTesting
-469  static Path getRegionSplitEditsPath(final FileSystem fs,
-470      final Entry logEntry, final Path rootDir, String fileNameBeingSplit)
-471      throws IOException {
-472    Path tableDir = FSUtils.getTableDir(rootDir, logEntry.getKey().getTableName());
-473    String encodedRegionName = Bytes.toString(logEntry.getKey().getEncodedRegionName());
-474    Path regiondir = HRegion.getRegionDir(tableDir, encodedRegionName);
-475    Path dir = getRegionDirRecoveredEditsDir(regiondir);
-476
-477    if (!fs.exists(regiondir)) {
-478      LOG.info("This region's directory does not exist: {}."
-479          + "It is very likely that it was already split so it is "
-480          + "safe to discard those edits.", regiondir);
-481      return null;
-482    }
-483    if (fs.exists(dir) && fs.isFile(dir)) {
-484      Path tmp = new Path("/tmp");
-485      if (!fs.exists(tmp)) {
-486        fs.mkdirs(tmp);
-487      }
-488      tmp = new Path(tmp,
-489          HConstants.RECOVERED_EDITS_DIR + "_" + encodedRegionName);
-490      LOG.warn("Found existing old file: {}. It could be some "
-491          + "leftover of an old installation. It should be a folder instead. "
-492          + "So moving it to {}", dir, tmp);
-493      if (!fs.rename(dir, tmp)) {
-494        LOG.warn("Failed to sideline old

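The getRegionSplitEditsPath javadoc above gives /hbase/some_table/2323432434/recovered.edits/2332 as the shape of a recovered-edits file: table dir, then encoded region name, then the RECOVERED_EDITS_DIR, then a file named for a sequence id. A toy sketch of assembling that path from its parts (constants inlined for illustration):

import org.apache.hadoop.fs.Path;

public class RecoveredEditsPathSketch {
  public static void main(String[] args) {
    Path walDir = new Path("/hbase");                        // cluster root/WAL dir
    Path tableDir = new Path(walDir, "some_table");          // table directory
    Path regionDir = new Path(tableDir, "2323432434");       // encoded region name
    Path editsDir = new Path(regionDir, "recovered.edits");  // RECOVERED_EDITS_DIR
    Path editsFile = new Path(editsDir, "2332");             // starting sequence id
    // Prints /hbase/some_table/2323432434/recovered.edits/2332
    System.out.println(editsFile);
  }
}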

[14/22] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-17 (git-site-role)
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b9830530/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.OpenRegionRemoteCall.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.OpenRegionRemoteCall.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.OpenRegionRemoteCall.html
index f0a9b50..13cebd8 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.OpenRegionRemoteCall.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.OpenRegionRemoteCall.html
@@ -69,7 +69,7 @@
 061      "hbase.regionserver.rpc.startup.waittime";
 062  private static final int DEFAULT_RS_RPC_STARTUP_WAIT_TIME = 60000;
 063
-064  private static final int RS_VERSION_WITH_EXEC_PROCS = 0x0201000; // 2.1
+064  private static final int RS_VERSION_WITH_EXEC_PROCS = 0x0200000; // 2.0
 065
 066  protected final MasterServices master;
 067  protected final long rsStartupWaitTime;
@@ -267,286 +267,289 @@
 259      final MasterProcedureEnv env = master.getMasterProcedureExecutor().getEnvironment();
 260
 261      request = ExecuteProceduresRequest.newBuilder();
-262      splitAndResolveOperation(getServerName(), operations, this);
-263
-264      try {
-265        final ExecuteProceduresResponse response = sendRequest(getServerName(), request.build());
-266        remoteCallCompleted(env, response);
-267      } catch (IOException e) {
-268        e = unwrapException(e);
-269        // TODO: In the future some operation may want to bail out early.
-270        // TODO: How many times should we retry (use numberOfAttemptsSoFar)
-271        if (!scheduleForRetry(e)) {
-272          remoteCallFailed(env, e);
-273        }
-274      }
-275      return null;
-276    }
-277
-278    public void dispatchOpenRequests(final MasterProcedureEnv env,
-279        final List<RegionOpenOperation> operations) {
-280      request.addOpenRegion(buildOpenRegionRequest(env, getServerName(), operations));
-281    }
-282
-283    public void dispatchCloseRequests(final MasterProcedureEnv env,
-284        final List<RegionCloseOperation> operations) {
-285      for (RegionCloseOperation op: operations) {
-286        request.addCloseRegion(op.buildCloseRegionRequest(getServerName()));
-287      }
-288    }
-289
-290    protected ExecuteProceduresResponse sendRequest(final ServerName serverName,
-291        final ExecuteProceduresRequest request) throws IOException {
-292      try {
-293        return getRsAdmin().executeProcedures(null, request);
-294      } catch (ServiceException se) {
-295        throw ProtobufUtil.getRemoteException(se);
-296      }
-297    }
-298
-299
-300    private void remoteCallCompleted(final MasterProcedureEnv env,
-301        final ExecuteProceduresResponse response) {
-302      /*
-303      for (RemoteProcedure proc: operations) {
-304        proc.remoteCallCompleted(env, getServerName(), response);
-305      }*/
-306    }
-307
-308    private void remoteCallFailed(final MasterProcedureEnv env, final IOException e) {
-309      for (RemoteProcedure proc: operations) {
-310        proc.remoteCallFailed(env, getServerName(), e);
-311      }
-312    }
-313  }
-314
-315  // ==
-316  //  Compatibility calls
-317  //  Since we don't have a "batch proc-exec" request on the target RS
-318  //  we have to chunk the requests by type and dispatch the specific request.
-319  // ==
-320  private static OpenRegionRequest buildOpenRegionRequest(final MasterProcedureEnv env,
-321      final ServerName serverName, final List<RegionOpenOperation> operations) {
-322    final OpenRegionRequest.Builder builder = OpenRegionRequest.newBuilder();
-323    builder.setServerStartCode(serverName.getStartcode());
-324    builder.setMasterSystemTime(EnvironmentEdgeManager.currentTime());
-325    for (RegionOpenOperation op: operations) {
-326      builder.addOpenInfo(op.buildRegionOpenInfoRequest(env));
-327    }
-328    return builder.build();
-329  }
-330
-331  private final class OpenRegionRemoteCall extends AbstractRSRemoteCall {
-332    private final List<RegionOpenOperation> operations;
+262      if (LOG.isTraceEnabled()) {
+263        LOG.trace("Building request with operations count=" + operations.size());
+264      }
+265      splitAndResolveOperation(getServerName(), operations, this);
+266
+267      try {
+268        final ExecuteProceduresResponse response = sendRequest(getServerName(), request.build());
+269        remoteCallCompleted(env, response);
+270      } catch (IOException e) {
+271        e = unwrapException(e);
+272        // TODO: In the future some operation may want to bail ou
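The hunk above is the dispatcher's core error-handling shape: batch the operations into one ExecuteProceduresRequest, send it, and on IOException either reschedule the whole call or report the failure back to every procedure that was batched into it. A generic sketch of that retry-or-fail pattern (the type and method names here are illustrative, not the dispatcher's own):

import java.io.IOException;
import java.util.List;

abstract class RemoteCallSketch<Req, Resp> {
  // Send one batched request; on failure, retry the whole call or fail everyone.
  void call(Req request, List<Runnable> failureCallbacks) {
    try {
      Resp response = sendRequest(request);
      onResponse(response);
    } catch (IOException e) {
      if (!scheduleForRetry(e)) {
        // Hard failure: every operation batched into this call is notified.
        failureCallbacks.forEach(Runnable::run);
      }
    }
  }

  abstract Resp sendRequest(Req request) throws IOException;
  abstract void onResponse(Resp response);
  abstract boolean scheduleForRetry(IOException e);
}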