http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d41f56fe/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.html b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.html
index 2ff9932..94e2ffd 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.html
@@ -41,190 +41,210 @@
 <span class="sourceLineNo">033</span>import 
org.apache.hadoop.hbase.client.Scan;<a name="line.33"></a>
 <span class="sourceLineNo">034</span>import 
org.apache.hadoop.hbase.client.metrics.ScanMetrics;<a name="line.34"></a>
 <span class="sourceLineNo">035</span>import 
org.apache.hadoop.hbase.io.ImmutableBytesWritable;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.io.Writable;<a 
name="line.36"></a>
-<span class="sourceLineNo">037</span>import 
org.apache.hadoop.mapreduce.InputFormat;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import 
org.apache.hadoop.mapreduce.InputSplit;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.mapreduce.Job;<a 
name="line.39"></a>
-<span class="sourceLineNo">040</span>import 
org.apache.hadoop.mapreduce.JobContext;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import 
org.apache.hadoop.mapreduce.RecordReader;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import 
org.apache.hadoop.mapreduce.TaskAttemptContext;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import 
org.apache.yetus.audience.InterfaceAudience;<a name="line.43"></a>
-<span class="sourceLineNo">044</span><a name="line.44"></a>
-<span class="sourceLineNo">045</span>import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;<a
 name="line.45"></a>
-<span class="sourceLineNo">046</span><a name="line.46"></a>
-<span class="sourceLineNo">047</span>/**<a name="line.47"></a>
-<span class="sourceLineNo">048</span> * TableSnapshotInputFormat allows a 
MapReduce job to run over a table snapshot. The job<a name="line.48"></a>
-<span class="sourceLineNo">049</span> * bypasses HBase servers, and directly 
accesses the underlying files (hfile, recovered edits,<a name="line.49"></a>
-<span class="sourceLineNo">050</span> * wals, etc) directly to provide maximum 
performance. The snapshot is not required to be<a name="line.50"></a>
-<span class="sourceLineNo">051</span> * restored to the live cluster or 
cloned. This also allows to run the mapreduce job from an<a name="line.51"></a>
-<span class="sourceLineNo">052</span> * online or offline hbase cluster. The 
snapshot files can be exported by using the<a name="line.52"></a>
-<span class="sourceLineNo">053</span> * {@link 
org.apache.hadoop.hbase.snapshot.ExportSnapshot} tool, to a pure-hdfs 
cluster,<a name="line.53"></a>
-<span class="sourceLineNo">054</span> * and this InputFormat can be used to 
run the mapreduce job directly over the snapshot files.<a name="line.54"></a>
-<span class="sourceLineNo">055</span> * The snapshot should not be deleted 
while there are jobs reading from snapshot files.<a name="line.55"></a>
-<span class="sourceLineNo">056</span> * &lt;p&gt;<a name="line.56"></a>
-<span class="sourceLineNo">057</span> * Usage is similar to TableInputFormat, 
and<a name="line.57"></a>
-<span class="sourceLineNo">058</span> * {@link 
TableMapReduceUtil#initTableSnapshotMapperJob(String, Scan, Class, Class, 
Class, Job, boolean, Path)}<a name="line.58"></a>
-<span class="sourceLineNo">059</span> * can be used to configure the job.<a 
name="line.59"></a>
-<span class="sourceLineNo">060</span> * &lt;pre&gt;{@code<a name="line.60"></a>
-<span class="sourceLineNo">061</span> * Job job = new Job(conf);<a 
name="line.61"></a>
-<span class="sourceLineNo">062</span> * Scan scan = new Scan();<a 
name="line.62"></a>
-<span class="sourceLineNo">063</span> * 
TableMapReduceUtil.initTableSnapshotMapperJob(snapshotName,<a 
name="line.63"></a>
-<span class="sourceLineNo">064</span> *      scan, MyTableMapper.class, 
MyMapKeyOutput.class,<a name="line.64"></a>
-<span class="sourceLineNo">065</span> *      MyMapOutputValueWritable.class, 
job, true);<a name="line.65"></a>
-<span class="sourceLineNo">066</span> * }<a name="line.66"></a>
-<span class="sourceLineNo">067</span> * &lt;/pre&gt;<a name="line.67"></a>
-<span class="sourceLineNo">068</span> * &lt;p&gt;<a name="line.68"></a>
-<span class="sourceLineNo">069</span> * Internally, this input format restores 
the snapshot into the given tmp directory. Similar to<a name="line.69"></a>
-<span class="sourceLineNo">070</span> * {@link TableInputFormat} an InputSplit 
is created per region. The region is opened for reading<a name="line.70"></a>
-<span class="sourceLineNo">071</span> * from each RecordReader. An internal 
RegionScanner is used to execute the<a name="line.71"></a>
-<span class="sourceLineNo">072</span> * {@link 
org.apache.hadoop.hbase.CellScanner} obtained from the user.<a 
name="line.72"></a>
-<span class="sourceLineNo">073</span> * &lt;p&gt;<a name="line.73"></a>
-<span class="sourceLineNo">074</span> * HBase owns all the data and snapshot 
files on the filesystem. Only the 'hbase' user can read from<a 
name="line.74"></a>
-<span class="sourceLineNo">075</span> * snapshot files and data files.<a 
name="line.75"></a>
-<span class="sourceLineNo">076</span> * To read from snapshot files directly 
from the file system, the user who is running the MR job<a name="line.76"></a>
-<span class="sourceLineNo">077</span> * must have sufficient permissions to 
access snapshot and reference files.<a name="line.77"></a>
-<span class="sourceLineNo">078</span> * This means that to run mapreduce over 
snapshot files, the MR job has to be run as the HBase<a name="line.78"></a>
-<span class="sourceLineNo">079</span> * user or the user must have group or 
other privileges in the filesystem (See HBASE-8369).<a name="line.79"></a>
-<span class="sourceLineNo">080</span> * Note that, given other users access to 
read from snapshot/data files will completely circumvent<a name="line.80"></a>
-<span class="sourceLineNo">081</span> * the access control enforced by 
HBase.<a name="line.81"></a>
-<span class="sourceLineNo">082</span> * @see 
org.apache.hadoop.hbase.client.TableSnapshotScanner<a name="line.82"></a>
-<span class="sourceLineNo">083</span> */<a name="line.83"></a>
-<span class="sourceLineNo">084</span>@InterfaceAudience.Public<a 
name="line.84"></a>
-<span class="sourceLineNo">085</span>public class TableSnapshotInputFormat 
extends InputFormat&lt;ImmutableBytesWritable, Result&gt; {<a 
name="line.85"></a>
-<span class="sourceLineNo">086</span><a name="line.86"></a>
-<span class="sourceLineNo">087</span>  public static class 
TableSnapshotRegionSplit extends InputSplit implements Writable {<a 
name="line.87"></a>
-<span class="sourceLineNo">088</span>    private 
TableSnapshotInputFormatImpl.InputSplit delegate;<a name="line.88"></a>
+<span class="sourceLineNo">036</span>import 
org.apache.hadoop.hbase.util.RegionSplitter;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.io.Writable;<a 
name="line.37"></a>
+<span class="sourceLineNo">038</span>import 
org.apache.hadoop.mapreduce.InputFormat;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import 
org.apache.hadoop.mapreduce.InputSplit;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.mapreduce.Job;<a 
name="line.40"></a>
+<span class="sourceLineNo">041</span>import 
org.apache.hadoop.mapreduce.JobContext;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import 
org.apache.hadoop.mapreduce.RecordReader;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import 
org.apache.hadoop.mapreduce.TaskAttemptContext;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import 
org.apache.yetus.audience.InterfaceAudience;<a name="line.44"></a>
+<span class="sourceLineNo">045</span><a name="line.45"></a>
+<span class="sourceLineNo">046</span>import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;<a
 name="line.46"></a>
+<span class="sourceLineNo">047</span><a name="line.47"></a>
+<span class="sourceLineNo">048</span>/**<a name="line.48"></a>
+<span class="sourceLineNo">049</span> * TableSnapshotInputFormat allows a 
MapReduce job to run over a table snapshot. The job<a name="line.49"></a>
+<span class="sourceLineNo">050</span> * bypasses HBase servers, and directly 
accesses the underlying files (hfile, recovered edits,<a name="line.50"></a>
+<span class="sourceLineNo">051</span> * wals, etc) directly to provide maximum 
performance. The snapshot is not required to be<a name="line.51"></a>
+<span class="sourceLineNo">052</span> * restored to the live cluster or 
cloned. This also allows to run the mapreduce job from an<a name="line.52"></a>
+<span class="sourceLineNo">053</span> * online or offline hbase cluster. The 
snapshot files can be exported by using the<a name="line.53"></a>
+<span class="sourceLineNo">054</span> * {@link 
org.apache.hadoop.hbase.snapshot.ExportSnapshot} tool, to a pure-hdfs 
cluster,<a name="line.54"></a>
+<span class="sourceLineNo">055</span> * and this InputFormat can be used to 
run the mapreduce job directly over the snapshot files.<a name="line.55"></a>
+<span class="sourceLineNo">056</span> * The snapshot should not be deleted 
while there are jobs reading from snapshot files.<a name="line.56"></a>
+<span class="sourceLineNo">057</span> * &lt;p&gt;<a name="line.57"></a>
+<span class="sourceLineNo">058</span> * Usage is similar to TableInputFormat, 
and<a name="line.58"></a>
+<span class="sourceLineNo">059</span> * {@link 
TableMapReduceUtil#initTableSnapshotMapperJob(String, Scan, Class, Class, 
Class, Job, boolean, Path)}<a name="line.59"></a>
+<span class="sourceLineNo">060</span> * can be used to configure the job.<a 
name="line.60"></a>
+<span class="sourceLineNo">061</span> * &lt;pre&gt;{@code<a name="line.61"></a>
+<span class="sourceLineNo">062</span> * Job job = new Job(conf);<a 
name="line.62"></a>
+<span class="sourceLineNo">063</span> * Scan scan = new Scan();<a 
name="line.63"></a>
+<span class="sourceLineNo">064</span> * 
TableMapReduceUtil.initTableSnapshotMapperJob(snapshotName,<a 
name="line.64"></a>
+<span class="sourceLineNo">065</span> *      scan, MyTableMapper.class, 
MyMapKeyOutput.class,<a name="line.65"></a>
+<span class="sourceLineNo">066</span> *      MyMapOutputValueWritable.class, 
job, true);<a name="line.66"></a>
+<span class="sourceLineNo">067</span> * }<a name="line.67"></a>
+<span class="sourceLineNo">068</span> * &lt;/pre&gt;<a name="line.68"></a>
+<span class="sourceLineNo">069</span> * &lt;p&gt;<a name="line.69"></a>
+<span class="sourceLineNo">070</span> * Internally, this input format restores 
the snapshot into the given tmp directory. By default,<a name="line.70"></a>
+<span class="sourceLineNo">071</span> * and similar to {@link 
TableInputFormat} an InputSplit is created per region, but optionally you<a 
name="line.71"></a>
+<span class="sourceLineNo">072</span> * can run N mapper tasks per every 
region, in which case the region key range will be split to<a 
name="line.72"></a>
+<span class="sourceLineNo">073</span> * N sub-ranges and an InputSplit will be 
created per sub-range. The region is opened for reading<a name="line.73"></a>
+<span class="sourceLineNo">074</span> * from each RecordReader. An internal 
RegionScanner is used to execute the<a name="line.74"></a>
+<span class="sourceLineNo">075</span> * {@link 
org.apache.hadoop.hbase.CellScanner} obtained from the user.<a 
name="line.75"></a>
+<span class="sourceLineNo">076</span> * &lt;p&gt;<a name="line.76"></a>
+<span class="sourceLineNo">077</span> * HBase owns all the data and snapshot 
files on the filesystem. Only the 'hbase' user can read from<a 
name="line.77"></a>
+<span class="sourceLineNo">078</span> * snapshot files and data files.<a 
name="line.78"></a>
+<span class="sourceLineNo">079</span> * To read from snapshot files directly 
from the file system, the user who is running the MR job<a name="line.79"></a>
+<span class="sourceLineNo">080</span> * must have sufficient permissions to 
access snapshot and reference files.<a name="line.80"></a>
+<span class="sourceLineNo">081</span> * This means that to run mapreduce over 
snapshot files, the MR job has to be run as the HBase<a name="line.81"></a>
+<span class="sourceLineNo">082</span> * user or the user must have group or 
other privileges in the filesystem (See HBASE-8369).<a name="line.82"></a>
+<span class="sourceLineNo">083</span> * Note that, given other users access to 
read from snapshot/data files will completely circumvent<a name="line.83"></a>
+<span class="sourceLineNo">084</span> * the access control enforced by 
HBase.<a name="line.84"></a>
+<span class="sourceLineNo">085</span> * @see 
org.apache.hadoop.hbase.client.TableSnapshotScanner<a name="line.85"></a>
+<span class="sourceLineNo">086</span> */<a name="line.86"></a>
+<span class="sourceLineNo">087</span>@InterfaceAudience.Public<a 
name="line.87"></a>
+<span class="sourceLineNo">088</span>public class TableSnapshotInputFormat 
extends InputFormat&lt;ImmutableBytesWritable, Result&gt; {<a 
name="line.88"></a>
 <span class="sourceLineNo">089</span><a name="line.89"></a>
-<span class="sourceLineNo">090</span>    // constructor for mapreduce 
framework / Writable<a name="line.90"></a>
-<span class="sourceLineNo">091</span>    public TableSnapshotRegionSplit() {<a 
name="line.91"></a>
-<span class="sourceLineNo">092</span>      this.delegate = new 
TableSnapshotInputFormatImpl.InputSplit();<a name="line.92"></a>
-<span class="sourceLineNo">093</span>    }<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>    public 
TableSnapshotRegionSplit(TableSnapshotInputFormatImpl.InputSplit delegate) {<a 
name="line.95"></a>
-<span class="sourceLineNo">096</span>      this.delegate = delegate;<a 
name="line.96"></a>
-<span class="sourceLineNo">097</span>    }<a name="line.97"></a>
-<span class="sourceLineNo">098</span><a name="line.98"></a>
-<span class="sourceLineNo">099</span>    public 
TableSnapshotRegionSplit(HTableDescriptor htd, HRegionInfo regionInfo,<a 
name="line.99"></a>
-<span class="sourceLineNo">100</span>        List&lt;String&gt; locations, 
Scan scan, Path restoreDir) {<a name="line.100"></a>
-<span class="sourceLineNo">101</span>      this.delegate =<a 
name="line.101"></a>
-<span class="sourceLineNo">102</span>          new 
TableSnapshotInputFormatImpl.InputSplit(htd, regionInfo, locations, scan, 
restoreDir);<a name="line.102"></a>
-<span class="sourceLineNo">103</span>    }<a name="line.103"></a>
-<span class="sourceLineNo">104</span><a name="line.104"></a>
-<span class="sourceLineNo">105</span>    @Override<a name="line.105"></a>
-<span class="sourceLineNo">106</span>    public long getLength() throws 
IOException, InterruptedException {<a name="line.106"></a>
-<span class="sourceLineNo">107</span>      return delegate.getLength();<a 
name="line.107"></a>
-<span class="sourceLineNo">108</span>    }<a name="line.108"></a>
-<span class="sourceLineNo">109</span><a name="line.109"></a>
-<span class="sourceLineNo">110</span>    @Override<a name="line.110"></a>
-<span class="sourceLineNo">111</span>    public String[] getLocations() throws 
IOException, InterruptedException {<a name="line.111"></a>
-<span class="sourceLineNo">112</span>      return delegate.getLocations();<a 
name="line.112"></a>
-<span class="sourceLineNo">113</span>    }<a name="line.113"></a>
-<span class="sourceLineNo">114</span><a name="line.114"></a>
-<span class="sourceLineNo">115</span>    @Override<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    public void write(DataOutput out) 
throws IOException {<a name="line.116"></a>
-<span class="sourceLineNo">117</span>      delegate.write(out);<a 
name="line.117"></a>
-<span class="sourceLineNo">118</span>    }<a name="line.118"></a>
-<span class="sourceLineNo">119</span><a name="line.119"></a>
-<span class="sourceLineNo">120</span>    @Override<a name="line.120"></a>
-<span class="sourceLineNo">121</span>    public void readFields(DataInput in) 
throws IOException {<a name="line.121"></a>
-<span class="sourceLineNo">122</span>      delegate.readFields(in);<a 
name="line.122"></a>
-<span class="sourceLineNo">123</span>    }<a name="line.123"></a>
-<span class="sourceLineNo">124</span><a name="line.124"></a>
-<span class="sourceLineNo">125</span>    /**<a name="line.125"></a>
-<span class="sourceLineNo">126</span>     * @deprecated As of release 2.0.0, 
this will be removed in HBase 3.0.0<a name="line.126"></a>
-<span class="sourceLineNo">127</span>     *             Use {@link 
#getRegion()}<a name="line.127"></a>
-<span class="sourceLineNo">128</span>     */<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    @Deprecated<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    public HRegionInfo getRegionInfo() 
{<a name="line.130"></a>
-<span class="sourceLineNo">131</span>      return delegate.getRegionInfo();<a 
name="line.131"></a>
-<span class="sourceLineNo">132</span>    }<a name="line.132"></a>
-<span class="sourceLineNo">133</span><a name="line.133"></a>
-<span class="sourceLineNo">134</span>    public RegionInfo getRegion() {<a 
name="line.134"></a>
-<span class="sourceLineNo">135</span>      return delegate.getRegionInfo();<a 
name="line.135"></a>
-<span class="sourceLineNo">136</span>    }<a name="line.136"></a>
-<span class="sourceLineNo">137</span>  }<a name="line.137"></a>
-<span class="sourceLineNo">138</span><a name="line.138"></a>
-<span class="sourceLineNo">139</span>  @VisibleForTesting<a 
name="line.139"></a>
-<span class="sourceLineNo">140</span>  static class 
TableSnapshotRegionRecordReader extends<a name="line.140"></a>
-<span class="sourceLineNo">141</span>      
RecordReader&lt;ImmutableBytesWritable, Result&gt; {<a name="line.141"></a>
-<span class="sourceLineNo">142</span>    private 
TableSnapshotInputFormatImpl.RecordReader delegate =<a name="line.142"></a>
-<span class="sourceLineNo">143</span>      new 
TableSnapshotInputFormatImpl.RecordReader();<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    private TaskAttemptContext context;<a 
name="line.144"></a>
-<span class="sourceLineNo">145</span>    private Method getCounter;<a 
name="line.145"></a>
-<span class="sourceLineNo">146</span><a name="line.146"></a>
-<span class="sourceLineNo">147</span>    @Override<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    public void initialize(InputSplit 
split, TaskAttemptContext context) throws IOException,<a name="line.148"></a>
-<span class="sourceLineNo">149</span>        InterruptedException {<a 
name="line.149"></a>
-<span class="sourceLineNo">150</span>      this.context = context;<a 
name="line.150"></a>
-<span class="sourceLineNo">151</span>      getCounter = 
TableRecordReaderImpl.retrieveGetCounterWithStringsParams(context);<a 
name="line.151"></a>
-<span class="sourceLineNo">152</span>      delegate.initialize(<a 
name="line.152"></a>
-<span class="sourceLineNo">153</span>        ((TableSnapshotRegionSplit) 
split).delegate,<a name="line.153"></a>
-<span class="sourceLineNo">154</span>        context.getConfiguration());<a 
name="line.154"></a>
-<span class="sourceLineNo">155</span>    }<a name="line.155"></a>
-<span class="sourceLineNo">156</span><a name="line.156"></a>
-<span class="sourceLineNo">157</span>    @Override<a name="line.157"></a>
-<span class="sourceLineNo">158</span>    public boolean nextKeyValue() throws 
IOException, InterruptedException {<a name="line.158"></a>
-<span class="sourceLineNo">159</span>      boolean result = 
delegate.nextKeyValue();<a name="line.159"></a>
-<span class="sourceLineNo">160</span>      if (result) {<a name="line.160"></a>
-<span class="sourceLineNo">161</span>        ScanMetrics scanMetrics = 
delegate.getScanner().getScanMetrics();<a name="line.161"></a>
-<span class="sourceLineNo">162</span>        if (scanMetrics != null 
&amp;&amp; context != null) {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>          
TableRecordReaderImpl.updateCounters(scanMetrics, 0, getCounter, context, 0);<a 
name="line.163"></a>
-<span class="sourceLineNo">164</span>        }<a name="line.164"></a>
-<span class="sourceLineNo">165</span>      }<a name="line.165"></a>
-<span class="sourceLineNo">166</span>      return result;<a 
name="line.166"></a>
-<span class="sourceLineNo">167</span>    }<a name="line.167"></a>
-<span class="sourceLineNo">168</span><a name="line.168"></a>
-<span class="sourceLineNo">169</span>    @Override<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    public ImmutableBytesWritable 
getCurrentKey() throws IOException, InterruptedException {<a 
name="line.170"></a>
-<span class="sourceLineNo">171</span>      return delegate.getCurrentKey();<a 
name="line.171"></a>
-<span class="sourceLineNo">172</span>    }<a name="line.172"></a>
-<span class="sourceLineNo">173</span><a name="line.173"></a>
-<span class="sourceLineNo">174</span>    @Override<a name="line.174"></a>
-<span class="sourceLineNo">175</span>    public Result getCurrentValue() 
throws IOException, InterruptedException {<a name="line.175"></a>
-<span class="sourceLineNo">176</span>      return 
delegate.getCurrentValue();<a name="line.176"></a>
-<span class="sourceLineNo">177</span>    }<a name="line.177"></a>
-<span class="sourceLineNo">178</span><a name="line.178"></a>
-<span class="sourceLineNo">179</span>    @Override<a name="line.179"></a>
-<span class="sourceLineNo">180</span>    public float getProgress() throws 
IOException, InterruptedException {<a name="line.180"></a>
-<span class="sourceLineNo">181</span>      return delegate.getProgress();<a 
name="line.181"></a>
-<span class="sourceLineNo">182</span>    }<a name="line.182"></a>
-<span class="sourceLineNo">183</span><a name="line.183"></a>
-<span class="sourceLineNo">184</span>    @Override<a name="line.184"></a>
-<span class="sourceLineNo">185</span>    public void close() throws 
IOException {<a name="line.185"></a>
-<span class="sourceLineNo">186</span>      delegate.close();<a 
name="line.186"></a>
-<span class="sourceLineNo">187</span>    }<a name="line.187"></a>
-<span class="sourceLineNo">188</span>  }<a name="line.188"></a>
-<span class="sourceLineNo">189</span><a name="line.189"></a>
-<span class="sourceLineNo">190</span>  @Override<a name="line.190"></a>
-<span class="sourceLineNo">191</span>  public 
RecordReader&lt;ImmutableBytesWritable, Result&gt; createRecordReader(<a 
name="line.191"></a>
-<span class="sourceLineNo">192</span>      InputSplit split, 
TaskAttemptContext context) throws IOException {<a name="line.192"></a>
-<span class="sourceLineNo">193</span>    return new 
TableSnapshotRegionRecordReader();<a name="line.193"></a>
-<span class="sourceLineNo">194</span>  }<a name="line.194"></a>
-<span class="sourceLineNo">195</span><a name="line.195"></a>
-<span class="sourceLineNo">196</span>  @Override<a name="line.196"></a>
-<span class="sourceLineNo">197</span>  public List&lt;InputSplit&gt; 
getSplits(JobContext job) throws IOException, InterruptedException {<a 
name="line.197"></a>
-<span class="sourceLineNo">198</span>    List&lt;InputSplit&gt; results = new 
ArrayList&lt;&gt;();<a name="line.198"></a>
-<span class="sourceLineNo">199</span>    for 
(TableSnapshotInputFormatImpl.InputSplit split :<a name="line.199"></a>
-<span class="sourceLineNo">200</span>        
TableSnapshotInputFormatImpl.getSplits(job.getConfiguration())) {<a 
name="line.200"></a>
-<span class="sourceLineNo">201</span>      results.add(new 
TableSnapshotRegionSplit(split));<a name="line.201"></a>
-<span class="sourceLineNo">202</span>    }<a name="line.202"></a>
-<span class="sourceLineNo">203</span>    return results;<a name="line.203"></a>
-<span class="sourceLineNo">204</span>  }<a name="line.204"></a>
-<span class="sourceLineNo">205</span><a name="line.205"></a>
-<span class="sourceLineNo">206</span>  /**<a name="line.206"></a>
-<span class="sourceLineNo">207</span>   * Configures the job to use 
TableSnapshotInputFormat to read from a snapshot.<a name="line.207"></a>
-<span class="sourceLineNo">208</span>   * @param job the job to configure<a 
name="line.208"></a>
-<span class="sourceLineNo">209</span>   * @param snapshotName the name of the 
snapshot to read from<a name="line.209"></a>
-<span class="sourceLineNo">210</span>   * @param restoreDir a temporary 
directory to restore the snapshot into. Current user should<a 
name="line.210"></a>
-<span class="sourceLineNo">211</span>   * have write permissions to this 
directory, and this should not be a subdirectory of rootdir.<a 
name="line.211"></a>
-<span class="sourceLineNo">212</span>   * After the job is finished, 
restoreDir can be deleted.<a name="line.212"></a>
-<span class="sourceLineNo">213</span>   * @throws IOException if an error 
occurs<a name="line.213"></a>
-<span class="sourceLineNo">214</span>   */<a name="line.214"></a>
-<span class="sourceLineNo">215</span>  public static void setInput(Job job, 
String snapshotName, Path restoreDir)<a name="line.215"></a>
-<span class="sourceLineNo">216</span>      throws IOException {<a 
name="line.216"></a>
-<span class="sourceLineNo">217</span>    
TableSnapshotInputFormatImpl.setInput(job.getConfiguration(), snapshotName, 
restoreDir);<a name="line.217"></a>
-<span class="sourceLineNo">218</span>  }<a name="line.218"></a>
-<span class="sourceLineNo">219</span>}<a name="line.219"></a>
+<span class="sourceLineNo">090</span>  public static class 
TableSnapshotRegionSplit extends InputSplit implements Writable {<a 
name="line.90"></a>
+<span class="sourceLineNo">091</span>    private 
TableSnapshotInputFormatImpl.InputSplit delegate;<a name="line.91"></a>
+<span class="sourceLineNo">092</span><a name="line.92"></a>
+<span class="sourceLineNo">093</span>    // constructor for mapreduce 
framework / Writable<a name="line.93"></a>
+<span class="sourceLineNo">094</span>    public TableSnapshotRegionSplit() {<a 
name="line.94"></a>
+<span class="sourceLineNo">095</span>      this.delegate = new 
TableSnapshotInputFormatImpl.InputSplit();<a name="line.95"></a>
+<span class="sourceLineNo">096</span>    }<a name="line.96"></a>
+<span class="sourceLineNo">097</span><a name="line.97"></a>
+<span class="sourceLineNo">098</span>    public 
TableSnapshotRegionSplit(TableSnapshotInputFormatImpl.InputSplit delegate) {<a 
name="line.98"></a>
+<span class="sourceLineNo">099</span>      this.delegate = delegate;<a 
name="line.99"></a>
+<span class="sourceLineNo">100</span>    }<a name="line.100"></a>
+<span class="sourceLineNo">101</span><a name="line.101"></a>
+<span class="sourceLineNo">102</span>    public 
TableSnapshotRegionSplit(HTableDescriptor htd, HRegionInfo regionInfo,<a 
name="line.102"></a>
+<span class="sourceLineNo">103</span>        List&lt;String&gt; locations, 
Scan scan, Path restoreDir) {<a name="line.103"></a>
+<span class="sourceLineNo">104</span>      this.delegate =<a 
name="line.104"></a>
+<span class="sourceLineNo">105</span>          new 
TableSnapshotInputFormatImpl.InputSplit(htd, regionInfo, locations, scan, 
restoreDir);<a name="line.105"></a>
+<span class="sourceLineNo">106</span>    }<a name="line.106"></a>
+<span class="sourceLineNo">107</span><a name="line.107"></a>
+<span class="sourceLineNo">108</span>    @Override<a name="line.108"></a>
+<span class="sourceLineNo">109</span>    public long getLength() throws 
IOException, InterruptedException {<a name="line.109"></a>
+<span class="sourceLineNo">110</span>      return delegate.getLength();<a 
name="line.110"></a>
+<span class="sourceLineNo">111</span>    }<a name="line.111"></a>
+<span class="sourceLineNo">112</span><a name="line.112"></a>
+<span class="sourceLineNo">113</span>    @Override<a name="line.113"></a>
+<span class="sourceLineNo">114</span>    public String[] getLocations() throws 
IOException, InterruptedException {<a name="line.114"></a>
+<span class="sourceLineNo">115</span>      return delegate.getLocations();<a 
name="line.115"></a>
+<span class="sourceLineNo">116</span>    }<a name="line.116"></a>
+<span class="sourceLineNo">117</span><a name="line.117"></a>
+<span class="sourceLineNo">118</span>    @Override<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    public void write(DataOutput out) 
throws IOException {<a name="line.119"></a>
+<span class="sourceLineNo">120</span>      delegate.write(out);<a 
name="line.120"></a>
+<span class="sourceLineNo">121</span>    }<a name="line.121"></a>
+<span class="sourceLineNo">122</span><a name="line.122"></a>
+<span class="sourceLineNo">123</span>    @Override<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    public void readFields(DataInput in) 
throws IOException {<a name="line.124"></a>
+<span class="sourceLineNo">125</span>      delegate.readFields(in);<a 
name="line.125"></a>
+<span class="sourceLineNo">126</span>    }<a name="line.126"></a>
+<span class="sourceLineNo">127</span><a name="line.127"></a>
+<span class="sourceLineNo">128</span>    /**<a name="line.128"></a>
+<span class="sourceLineNo">129</span>     * @deprecated As of release 2.0.0, 
this will be removed in HBase 3.0.0<a name="line.129"></a>
+<span class="sourceLineNo">130</span>     *             Use {@link 
#getRegion()}<a name="line.130"></a>
+<span class="sourceLineNo">131</span>     */<a name="line.131"></a>
+<span class="sourceLineNo">132</span>    @Deprecated<a name="line.132"></a>
+<span class="sourceLineNo">133</span>    public HRegionInfo getRegionInfo() 
{<a name="line.133"></a>
+<span class="sourceLineNo">134</span>      return delegate.getRegionInfo();<a 
name="line.134"></a>
+<span class="sourceLineNo">135</span>    }<a name="line.135"></a>
+<span class="sourceLineNo">136</span><a name="line.136"></a>
+<span class="sourceLineNo">137</span>    public RegionInfo getRegion() {<a 
name="line.137"></a>
+<span class="sourceLineNo">138</span>      return delegate.getRegionInfo();<a 
name="line.138"></a>
+<span class="sourceLineNo">139</span>    }<a name="line.139"></a>
+<span class="sourceLineNo">140</span>  }<a name="line.140"></a>
+<span class="sourceLineNo">141</span><a name="line.141"></a>
+<span class="sourceLineNo">142</span>  @VisibleForTesting<a 
name="line.142"></a>
+<span class="sourceLineNo">143</span>  static class 
TableSnapshotRegionRecordReader extends<a name="line.143"></a>
+<span class="sourceLineNo">144</span>      
RecordReader&lt;ImmutableBytesWritable, Result&gt; {<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    private 
TableSnapshotInputFormatImpl.RecordReader delegate =<a name="line.145"></a>
+<span class="sourceLineNo">146</span>      new 
TableSnapshotInputFormatImpl.RecordReader();<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    private TaskAttemptContext context;<a 
name="line.147"></a>
+<span class="sourceLineNo">148</span>    private Method getCounter;<a 
name="line.148"></a>
+<span class="sourceLineNo">149</span><a name="line.149"></a>
+<span class="sourceLineNo">150</span>    @Override<a name="line.150"></a>
+<span class="sourceLineNo">151</span>    public void initialize(InputSplit 
split, TaskAttemptContext context) throws IOException,<a name="line.151"></a>
+<span class="sourceLineNo">152</span>        InterruptedException {<a 
name="line.152"></a>
+<span class="sourceLineNo">153</span>      this.context = context;<a 
name="line.153"></a>
+<span class="sourceLineNo">154</span>      getCounter = 
TableRecordReaderImpl.retrieveGetCounterWithStringsParams(context);<a 
name="line.154"></a>
+<span class="sourceLineNo">155</span>      delegate.initialize(<a 
name="line.155"></a>
+<span class="sourceLineNo">156</span>        ((TableSnapshotRegionSplit) 
split).delegate,<a name="line.156"></a>
+<span class="sourceLineNo">157</span>        context.getConfiguration());<a 
name="line.157"></a>
+<span class="sourceLineNo">158</span>    }<a name="line.158"></a>
+<span class="sourceLineNo">159</span><a name="line.159"></a>
+<span class="sourceLineNo">160</span>    @Override<a name="line.160"></a>
+<span class="sourceLineNo">161</span>    public boolean nextKeyValue() throws 
IOException, InterruptedException {<a name="line.161"></a>
+<span class="sourceLineNo">162</span>      boolean result = 
delegate.nextKeyValue();<a name="line.162"></a>
+<span class="sourceLineNo">163</span>      if (result) {<a name="line.163"></a>
+<span class="sourceLineNo">164</span>        ScanMetrics scanMetrics = 
delegate.getScanner().getScanMetrics();<a name="line.164"></a>
+<span class="sourceLineNo">165</span>        if (scanMetrics != null 
&amp;&amp; context != null) {<a name="line.165"></a>
+<span class="sourceLineNo">166</span>          
TableRecordReaderImpl.updateCounters(scanMetrics, 0, getCounter, context, 0);<a 
name="line.166"></a>
+<span class="sourceLineNo">167</span>        }<a name="line.167"></a>
+<span class="sourceLineNo">168</span>      }<a name="line.168"></a>
+<span class="sourceLineNo">169</span>      return result;<a 
name="line.169"></a>
+<span class="sourceLineNo">170</span>    }<a name="line.170"></a>
+<span class="sourceLineNo">171</span><a name="line.171"></a>
+<span class="sourceLineNo">172</span>    @Override<a name="line.172"></a>
+<span class="sourceLineNo">173</span>    public ImmutableBytesWritable 
getCurrentKey() throws IOException, InterruptedException {<a 
name="line.173"></a>
+<span class="sourceLineNo">174</span>      return delegate.getCurrentKey();<a 
name="line.174"></a>
+<span class="sourceLineNo">175</span>    }<a name="line.175"></a>
+<span class="sourceLineNo">176</span><a name="line.176"></a>
+<span class="sourceLineNo">177</span>    @Override<a name="line.177"></a>
+<span class="sourceLineNo">178</span>    public Result getCurrentValue() 
throws IOException, InterruptedException {<a name="line.178"></a>
+<span class="sourceLineNo">179</span>      return 
delegate.getCurrentValue();<a name="line.179"></a>
+<span class="sourceLineNo">180</span>    }<a name="line.180"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>    @Override<a name="line.182"></a>
+<span class="sourceLineNo">183</span>    public float getProgress() throws 
IOException, InterruptedException {<a name="line.183"></a>
+<span class="sourceLineNo">184</span>      return delegate.getProgress();<a 
name="line.184"></a>
+<span class="sourceLineNo">185</span>    }<a name="line.185"></a>
+<span class="sourceLineNo">186</span><a name="line.186"></a>
+<span class="sourceLineNo">187</span>    @Override<a name="line.187"></a>
+<span class="sourceLineNo">188</span>    public void close() throws 
IOException {<a name="line.188"></a>
+<span class="sourceLineNo">189</span>      delegate.close();<a 
name="line.189"></a>
+<span class="sourceLineNo">190</span>    }<a name="line.190"></a>
+<span class="sourceLineNo">191</span>  }<a name="line.191"></a>
+<span class="sourceLineNo">192</span><a name="line.192"></a>
+<span class="sourceLineNo">193</span>  @Override<a name="line.193"></a>
+<span class="sourceLineNo">194</span>  public 
RecordReader&lt;ImmutableBytesWritable, Result&gt; createRecordReader(<a 
name="line.194"></a>
+<span class="sourceLineNo">195</span>      InputSplit split, 
TaskAttemptContext context) throws IOException {<a name="line.195"></a>
+<span class="sourceLineNo">196</span>    return new 
TableSnapshotRegionRecordReader();<a name="line.196"></a>
+<span class="sourceLineNo">197</span>  }<a name="line.197"></a>
+<span class="sourceLineNo">198</span><a name="line.198"></a>
+<span class="sourceLineNo">199</span>  @Override<a name="line.199"></a>
+<span class="sourceLineNo">200</span>  public List&lt;InputSplit&gt; 
getSplits(JobContext job) throws IOException, InterruptedException {<a 
name="line.200"></a>
+<span class="sourceLineNo">201</span>    List&lt;InputSplit&gt; results = new 
ArrayList&lt;&gt;();<a name="line.201"></a>
+<span class="sourceLineNo">202</span>    for 
(TableSnapshotInputFormatImpl.InputSplit split :<a name="line.202"></a>
+<span class="sourceLineNo">203</span>        
TableSnapshotInputFormatImpl.getSplits(job.getConfiguration())) {<a 
name="line.203"></a>
+<span class="sourceLineNo">204</span>      results.add(new 
TableSnapshotRegionSplit(split));<a name="line.204"></a>
+<span class="sourceLineNo">205</span>    }<a name="line.205"></a>
+<span class="sourceLineNo">206</span>    return results;<a name="line.206"></a>
+<span class="sourceLineNo">207</span>  }<a name="line.207"></a>
+<span class="sourceLineNo">208</span><a name="line.208"></a>
+<span class="sourceLineNo">209</span>  /**<a name="line.209"></a>
+<span class="sourceLineNo">210</span>   * Configures the job to use 
TableSnapshotInputFormat to read from a snapshot.<a name="line.210"></a>
+<span class="sourceLineNo">211</span>   * @param job the job to configure<a 
name="line.211"></a>
+<span class="sourceLineNo">212</span>   * @param snapshotName the name of the 
snapshot to read from<a name="line.212"></a>
+<span class="sourceLineNo">213</span>   * @param restoreDir a temporary 
directory to restore the snapshot into. Current user should<a 
name="line.213"></a>
+<span class="sourceLineNo">214</span>   * have write permissions to this 
directory, and this should not be a subdirectory of rootdir.<a 
name="line.214"></a>
+<span class="sourceLineNo">215</span>   * After the job is finished, 
restoreDir can be deleted.<a name="line.215"></a>
+<span class="sourceLineNo">216</span>   * @throws IOException if an error 
occurs<a name="line.216"></a>
+<span class="sourceLineNo">217</span>   */<a name="line.217"></a>
+<span class="sourceLineNo">218</span>  public static void setInput(Job job, 
String snapshotName, Path restoreDir)<a name="line.218"></a>
+<span class="sourceLineNo">219</span>      throws IOException {<a 
name="line.219"></a>
+<span class="sourceLineNo">220</span>    
TableSnapshotInputFormatImpl.setInput(job.getConfiguration(), snapshotName, 
restoreDir);<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  }<a name="line.221"></a>
+<span class="sourceLineNo">222</span><a name="line.222"></a>
+<span class="sourceLineNo">223</span>  /**<a name="line.223"></a>
+<span class="sourceLineNo">224</span>   * Configures the job to use 
TableSnapshotInputFormat to read from a snapshot.<a name="line.224"></a>
+<span class="sourceLineNo">225</span>   * @param job the job to configure<a 
name="line.225"></a>
+<span class="sourceLineNo">226</span>   * @param snapshotName the name of the 
snapshot to read from<a name="line.226"></a>
+<span class="sourceLineNo">227</span>   * @param restoreDir a temporary 
directory to restore the snapshot into. Current user should<a 
name="line.227"></a>
+<span class="sourceLineNo">228</span>   * have write permissions to this 
directory, and this should not be a subdirectory of rootdir.<a 
name="line.228"></a>
+<span class="sourceLineNo">229</span>   * After the job is finished, 
restoreDir can be deleted.<a name="line.229"></a>
+<span class="sourceLineNo">230</span>   * @param splitAlgo split algorithm to 
generate splits from region<a name="line.230"></a>
+<span class="sourceLineNo">231</span>   * @param numSplitsPerRegion how many 
input splits to generate per one region<a name="line.231"></a>
+<span class="sourceLineNo">232</span>   * @throws IOException if an error 
occurs<a name="line.232"></a>
+<span class="sourceLineNo">233</span>   */<a name="line.233"></a>
+<span class="sourceLineNo">234</span>   public static void setInput(Job job, 
String snapshotName, Path restoreDir,<a name="line.234"></a>
+<span class="sourceLineNo">235</span>                               
RegionSplitter.SplitAlgorithm splitAlgo, int numSplitsPerRegion) throws 
IOException {<a name="line.235"></a>
+<span class="sourceLineNo">236</span>     
TableSnapshotInputFormatImpl.setInput(job.getConfiguration(), snapshotName, 
restoreDir,<a name="line.236"></a>
+<span class="sourceLineNo">237</span>             splitAlgo, 
numSplitsPerRegion);<a name="line.237"></a>
+<span class="sourceLineNo">238</span>   }<a name="line.238"></a>
+<span class="sourceLineNo">239</span>}<a name="line.239"></a>
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d41f56fe/book.html
----------------------------------------------------------------------
diff --git a/book.html b/book.html
index 9ad6214..245642b 100644
--- a/book.html
+++ b/book.html
@@ -35210,7 +35210,7 @@ The server will return cellblocks compressed using this same compressor as long
 <div id="footer">
 <div id="footer-text">
 Version 3.0.0-SNAPSHOT<br>
-Last updated 2017-09-29 14:29:46 UTC
+Last updated 2017-09-30 14:29:35 UTC
 </div>
 </div>
 </body>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d41f56fe/bulk-loads.html
----------------------------------------------------------------------
diff --git a/bulk-loads.html b/bulk-loads.html
index 02ea3bc..45deac4 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20170929" />
+    <meta name="Date-Revision-yyyymmdd" content="20170930" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013;  
       Bulk Loads in Apache HBase (TM)
@@ -311,7 +311,7 @@ under the License. -->
                         <a href="https://www.apache.org/";>The Apache Software 
Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 
2017-09-29</li>
+                  <li id="publishDate" class="pull-right">Last Published: 
2017-09-30</li>
             </p>
                 </div>
 
