http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3b220124/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/CopyTable.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/CopyTable.html b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/CopyTable.html
index 57efbf3..e2dafb0 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/CopyTable.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/CopyTable.html
@@ -44,354 +44,355 @@

This hunk regenerates the rendered source page for CopyTable. The only change to the
underlying Java source is the new import of org.apache.hadoop.hbase.tool.LoadIncrementalHFiles
at source line 39; every later line of the page differs only because its line anchor shifts
by one. The new listing follows, with the HTML span/anchor markup omitted:

import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

/**
 * Tool used to copy a table to another one which can be on a different setup.
 * It is also configurable with a start and end time as well as a specification
 * of the region server implementation if different from the local cluster.
 */
@InterfaceAudience.Public
public class CopyTable extends Configured implements Tool {
  private static final Log LOG = LogFactory.getLog(CopyTable.class);

  final static String NAME = "copytable";
  long startTime = 0;
  long endTime = HConstants.LATEST_TIMESTAMP;
  int batch = Integer.MAX_VALUE;
  int cacheRow = -1;
  int versions = -1;
  String tableName = null;
  String startRow = null;
  String stopRow = null;
  String dstTableName = null;
  String peerAddress = null;
  String families = null;
  boolean allCells = false;
  static boolean shuffle = false;

  boolean bulkload = false;
  Path bulkloadDir = null;

  private final static String JOB_NAME_CONF_KEY = "mapreduce.job.name";

  /**
   * Sets up the actual job.
   *
   * @param args The command line parameters.
   * @return The newly created job.
   * @throws IOException When setting up the job fails.
   */
  public Job createSubmittableJob(String[] args)
  throws IOException {
    if (!doCommandLine(args)) {
      return null;
    }

    Job job = Job.getInstance(getConf(), getConf().get(JOB_NAME_CONF_KEY, NAME + "_" + tableName));
    job.setJarByClass(CopyTable.class);
    Scan scan = new Scan();

    scan.setBatch(batch);
    scan.setCacheBlocks(false);

    if (cacheRow > 0) {
      scan.setCaching(cacheRow);
    } else {
      scan.setCaching(getConf().getInt(HConstants.HBASE_CLIENT_SCANNER_CACHING, 100));
    }

    scan.setTimeRange(startTime, endTime);

    if (allCells) {
      scan.setRaw(true);
    }
    if (shuffle) {
      job.getConfiguration().set(TableInputFormat.SHUFFLE_MAPS, "true");
    }
    if (versions >= 0) {
      scan.setMaxVersions(versions);
    }

    if (startRow != null) {
      scan.setStartRow(Bytes.toBytesBinary(startRow));
    }

    if (stopRow != null) {
      scan.setStopRow(Bytes.toBytesBinary(stopRow));
    }

    if (families != null) {
      String[] fams = families.split(",");
      Map<String, String> cfRenameMap = new HashMap<>();
      for (String fam : fams) {
        String sourceCf;
        if (fam.contains(":")) {
          // fam looks like "sourceCfName:destCfName"
          String[] srcAndDest = fam.split(":", 2);
          sourceCf = srcAndDest[0];
          String destCf = srcAndDest[1];
          cfRenameMap.put(sourceCf, destCf);
        } else {
          // fam is just "sourceCf"
          sourceCf = fam;
        }
        scan.addFamily(Bytes.toBytes(sourceCf));
      }
      Import.configureCfRenaming(job.getConfiguration(), cfRenameMap);
    }
    job.setNumReduceTasks(0);

    if (bulkload) {
      TableMapReduceUtil.initTableMapperJob(tableName, scan, Import.KeyValueImporter.class, null,
        null, job);

      // We need to split the inputs by destination tables so that output of Map can be bulk-loaded.
      TableInputFormat.configureSplitTable(job, TableName.valueOf(dstTableName));

      FileSystem fs = FileSystem.get(getConf());
      Random rand = new Random();
      Path root = new Path(fs.getWorkingDirectory(), "copytable");
      fs.mkdirs(root);
      while (true) {
        bulkloadDir = new Path(root, "" + rand.nextLong());
        if (!fs.exists(bulkloadDir)) {
          break;
        }
      }

      System.out.println("HFiles will be stored at " + this.bulkloadDir);
      HFileOutputFormat2.setOutputPath(job, bulkloadDir);
      try (Connection conn = ConnectionFactory.createConnection(getConf());
          Admin admin = conn.getAdmin()) {
        HFileOutputFormat2.configureIncrementalLoadMap(job,
            admin.listTableDescriptor((TableName.valueOf(dstTableName))));
      }
    } else {
      TableMapReduceUtil.initTableMapperJob(tableName, scan,
        Import.Importer.class, null, null, job);

      TableMapReduceUtil.initTableReducerJob(dstTableName, null, job, null, peerAddress, null,
        null);
    }

    return job;
  }

  /*
   * @param errorMsg Error message.  Can be null.
   */
  private static void printUsage(final String errorMsg) {
    if (errorMsg != null && errorMsg.length() > 0) {
      System.err.println("ERROR: " + errorMsg);
    }
    System.err.println("Usage: CopyTable [general options] [--starttime=X] [--endtime=Y] " +
        "[--new.name=NEW] [--peer.adr=ADR] <tablename>");
    System.err.println();
    System.err.println("Options:");
    System.err.println(" rs.class     hbase.regionserver.class of the peer cluster");
    System.err.println("              specify if different from current cluster");
    System.err.println(" rs.impl      hbase.regionserver.impl of the peer cluster");
    System.err.println(" startrow     the start row");
    System.err.println(" stoprow      the stop row");
    System.err.println(" starttime    beginning of the time range (unixtime in millis)");
    System.err.println("              without endtime means from starttime to forever");
    System.err.println(" endtime      end of the time range.  Ignored if no starttime specified.");
    System.err.println(" versions     number of cell versions to copy");
    System.err.println(" new.name     new table's name");
    System.err.println(" peer.adr     Address of the peer cluster given in the format");
    System.err.println("              hbase.zookeeper.quorum:hbase.zookeeper.client"
        + ".port:zookeeper.znode.parent");
    System.err.println(" families     comma-separated list of families to copy");
    System.err.println("              To copy from cf1 to cf2, give sourceCfName:destCfName.");
    System.err.println("              To keep the same name, just give \"cfName\"");
    System.err.println(" all.cells    also copy delete markers and deleted cells");
    System.err.println(" bulkload     Write input into HFiles and bulk load to the destination "
        + "table");
    System.err.println();
    System.err.println("Args:");
    System.err.println(" tablename    Name of the table to copy");
    System.err.println();
    System.err.println("Examples:");
    System.err.println(" To copy 'TestTable' to a cluster that uses replication for a 1 hour window:");
    System.err.println(" $ hbase " +
        "org.apache.hadoop.hbase.mapreduce.CopyTable --starttime=1265875194289 --endtime=1265878794289 " +
        "--peer.adr=server1,server2,server3:2181:/hbase --families=myOldCf:myNewCf,cf2,cf3 TestTable ");
    System.err.println("For performance consider the following general option:\n"
        + "  It is recommended that you set the following to >=100. A higher value uses more memory but\n"
        + "  decreases the round trip time to the server and may increase performance.\n"
        + "    -Dhbase.client.scanner.caching=100\n"
        + "  The following should always be set to false, to prevent writing data twice, which may produce \n"
        + "  inaccurate results.\n"
        + "    -Dmapreduce.map.speculative=false");
  }

  private boolean doCommandLine(final String[] args) {
    // Process command-line args. TODO: Better cmd-line processing
    // (but hopefully something not as painful as cli options).
    if (args.length < 1) {
      printUsage(null);
      return false;
    }
    try {
      for (int i = 0; i < args.length; i++) {
        String cmd = args[i];
        if (cmd.equals("-h") || cmd.startsWith("--h")) {
          printUsage(null);
          return false;
        }

        final String startRowArgKey = "--startrow=";
        if (cmd.startsWith(startRowArgKey)) {
          startRow = cmd.substring(startRowArgKey.length());
          continue;
        }

        final String stopRowArgKey = "--stoprow=";
        if (cmd.startsWith(stopRowArgKey)) {
          stopRow = cmd.substring(stopRowArgKey.length());
          continue;
        }

        final String startTimeArgKey = "--starttime=";
        if (cmd.startsWith(startTimeArgKey)) {
          startTime = Long.parseLong(cmd.substring(startTimeArgKey.length()));
          continue;
        }

        final String endTimeArgKey = "--endtime=";
        if (cmd.startsWith(endTimeArgKey)) {
          endTime = Long.parseLong(cmd.substring(endTimeArgKey.length()));
          continue;
        }

        final String batchArgKey = "--batch=";
        if (cmd.startsWith(batchArgKey)) {
          batch = Integer.parseInt(cmd.substring(batchArgKey.length()));
          continue;
        }

        final String cacheRowArgKey = "--cacheRow=";
        if (cmd.startsWith(cacheRowArgKey)) {
          cacheRow = Integer.parseInt(cmd.substring(cacheRowArgKey.length()));
          continue;
        }

        final String versionsArgKey = "--versions=";
        if (cmd.startsWith(versionsArgKey)) {
          versions = Integer.parseInt(cmd.substring(versionsArgKey.length()));
          continue;
        }

        final String newNameArgKey = "--new.name=";
        if (cmd.startsWith(newNameArgKey)) {
          dstTableName = cmd.substring(newNameArgKey.length());
          continue;
        }

        final String peerAdrArgKey = "--peer.adr=";
        if (cmd.startsWith(peerAdrArgKey)) {
          peerAddress = cmd.substring(peerAdrArgKey.length());
          continue;
        }

        final String familiesArgKey = "--families=";
        if (cmd.startsWith(familiesArgKey)) {
          families = cmd.substring(familiesArgKey.length());
          continue;
        }

        if (cmd.startsWith("--all.cells")) {
          allCells = true;
          continue;
        }

        if (cmd.startsWith("--bulkload")) {
          bulkload = true;
          continue;
        }

        if (cmd.startsWith("--shuffle")) {
          shuffle = true;
          continue;
        }

        if (i == args.length-1) {
          tableName = cmd;
        } else {
          printUsage("Invalid argument '" + cmd + "'");
          return false;
        }
      }
      if (dstTableName == null && peerAddress == null) {
        printUsage("At least a new table name or a " +
            "peer address must be specified");
        return false;
      }
      if ((endTime != 0) && (startTime > endTime)) {
        printUsage("Invalid time range filter: starttime=" + startTime + " > endtime=" + endTime);
        return false;
      }

      if (bulkload && peerAddress != null) {
        printUsage("Remote bulkload is not supported!");
        return false;
      }

      // set dstTableName if necessary
      if (dstTableName == null) {
        dstTableName = tableName;
class="sourceLineNo">340</span> }<a name="line.340"></a> +<span class="sourceLineNo">341</span> } catch (Exception e) {<a name="line.341"></a> +<span class="sourceLineNo">342</span> e.printStackTrace();<a name="line.342"></a> +<span class="sourceLineNo">343</span> printUsage("Can't start because " + e.getMessage());<a name="line.343"></a> +<span class="sourceLineNo">344</span> return false;<a name="line.344"></a> +<span class="sourceLineNo">345</span> }<a name="line.345"></a> +<span class="sourceLineNo">346</span> return true;<a name="line.346"></a> +<span class="sourceLineNo">347</span> }<a name="line.347"></a> +<span class="sourceLineNo">348</span><a name="line.348"></a> +<span class="sourceLineNo">349</span> /**<a name="line.349"></a> +<span class="sourceLineNo">350</span> * Main entry point.<a name="line.350"></a> +<span class="sourceLineNo">351</span> *<a name="line.351"></a> +<span class="sourceLineNo">352</span> * @param args The command line parameters.<a name="line.352"></a> +<span class="sourceLineNo">353</span> * @throws Exception When running the job fails.<a name="line.353"></a> +<span class="sourceLineNo">354</span> */<a name="line.354"></a> +<span class="sourceLineNo">355</span> public static void main(String[] args) throws Exception {<a name="line.355"></a> +<span class="sourceLineNo">356</span> int ret = ToolRunner.run(HBaseConfiguration.create(), new CopyTable(), args);<a name="line.356"></a> +<span class="sourceLineNo">357</span> System.exit(ret);<a name="line.357"></a> +<span class="sourceLineNo">358</span> }<a name="line.358"></a> +<span class="sourceLineNo">359</span><a name="line.359"></a> +<span class="sourceLineNo">360</span> @Override<a name="line.360"></a> +<span class="sourceLineNo">361</span> public int run(String[] args) throws Exception {<a name="line.361"></a> +<span class="sourceLineNo">362</span> Job job = createSubmittableJob(args);<a name="line.362"></a> +<span class="sourceLineNo">363</span> if (job == null) return 1;<a name="line.363"></a> +<span class="sourceLineNo">364</span> if (!job.waitForCompletion(true)) {<a name="line.364"></a> +<span class="sourceLineNo">365</span> LOG.info("Map-reduce job failed!");<a name="line.365"></a> +<span class="sourceLineNo">366</span> if (bulkload) {<a name="line.366"></a> +<span class="sourceLineNo">367</span> LOG.info("Files are not bulkloaded!");<a name="line.367"></a> +<span class="sourceLineNo">368</span> }<a name="line.368"></a> +<span class="sourceLineNo">369</span> return 1;<a name="line.369"></a> +<span class="sourceLineNo">370</span> }<a name="line.370"></a> +<span class="sourceLineNo">371</span> int code = 0;<a name="line.371"></a> +<span class="sourceLineNo">372</span> if (bulkload) {<a name="line.372"></a> +<span class="sourceLineNo">373</span> code = new LoadIncrementalHFiles(this.getConf()).run(new String[]{this.bulkloadDir.toString(),<a name="line.373"></a> +<span class="sourceLineNo">374</span> this.dstTableName});<a name="line.374"></a> +<span class="sourceLineNo">375</span> if (code == 0) {<a name="line.375"></a> +<span class="sourceLineNo">376</span> // bulkloadDir is deleted only LoadIncrementalHFiles was successful so that one can rerun<a name="line.376"></a> +<span class="sourceLineNo">377</span> // LoadIncrementalHFiles.<a name="line.377"></a> +<span class="sourceLineNo">378</span> FileSystem fs = FileSystem.get(this.getConf());<a name="line.378"></a> +<span class="sourceLineNo">379</span> if (!fs.delete(this.bulkloadDir, true)) {<a name="line.379"></a> +<span class="sourceLineNo">380</span> 
LOG.error("Deleting folder " + bulkloadDir + " failed!");<a name="line.380"></a> +<span class="sourceLineNo">381</span> code = 1;<a name="line.381"></a> +<span class="sourceLineNo">382</span> }<a name="line.382"></a> +<span class="sourceLineNo">383</span> }<a name="line.383"></a> +<span class="sourceLineNo">384</span> }<a name="line.384"></a> +<span class="sourceLineNo">385</span> return code;<a name="line.385"></a> +<span class="sourceLineNo">386</span> }<a name="line.386"></a> +<span class="sourceLineNo">387</span>}<a name="line.387"></a>
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3b220124/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/Driver.html ---------------------------------------------------------------------- diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/Driver.html b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/Driver.html index 7cd733d..3a2be88 100644 --- a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/Driver.html +++ b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/Driver.html @@ -31,45 +31,46 @@ <span class="sourceLineNo">023</span>import org.apache.hadoop.hbase.classification.InterfaceStability;<a name="line.23"></a> <span class="sourceLineNo">024</span>import org.apache.hadoop.hbase.mapreduce.replication.VerifyReplication;<a name="line.24"></a> <span class="sourceLineNo">025</span>import org.apache.hadoop.hbase.snapshot.ExportSnapshot;<a name="line.25"></a> -<span class="sourceLineNo">026</span>import org.apache.hadoop.util.ProgramDriver;<a name="line.26"></a> -<span class="sourceLineNo">027</span><a name="line.27"></a> -<span class="sourceLineNo">028</span>/**<a name="line.28"></a> -<span class="sourceLineNo">029</span> * Driver for hbase mapreduce jobs. Select which to run by passing<a name="line.29"></a> -<span class="sourceLineNo">030</span> * name of job to this main.<a name="line.30"></a> -<span class="sourceLineNo">031</span> */<a name="line.31"></a> -<span class="sourceLineNo">032</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.32"></a> -<span class="sourceLineNo">033</span>@InterfaceStability.Stable<a name="line.33"></a> -<span class="sourceLineNo">034</span>public class Driver {<a name="line.34"></a> -<span class="sourceLineNo">035</span> /**<a name="line.35"></a> -<span class="sourceLineNo">036</span> * @param args<a name="line.36"></a> -<span class="sourceLineNo">037</span> * @throws Throwable<a name="line.37"></a> -<span class="sourceLineNo">038</span> */<a name="line.38"></a> -<span class="sourceLineNo">039</span> public static void main(String[] args) throws Throwable {<a name="line.39"></a> -<span class="sourceLineNo">040</span> ProgramDriver pgd = new ProgramDriver();<a name="line.40"></a> -<span class="sourceLineNo">041</span><a name="line.41"></a> -<span class="sourceLineNo">042</span> pgd.addClass(RowCounter.NAME, RowCounter.class,<a name="line.42"></a> -<span class="sourceLineNo">043</span> "Count rows in HBase table.");<a name="line.43"></a> -<span class="sourceLineNo">044</span> pgd.addClass(CellCounter.NAME, CellCounter.class,<a name="line.44"></a> -<span class="sourceLineNo">045</span> "Count cells in HBase table.");<a name="line.45"></a> -<span class="sourceLineNo">046</span> pgd.addClass(Export.NAME, Export.class, "Write table data to HDFS.");<a name="line.46"></a> -<span class="sourceLineNo">047</span> pgd.addClass(Import.NAME, Import.class, "Import data written by Export.");<a name="line.47"></a> -<span class="sourceLineNo">048</span> pgd.addClass(ImportTsv.NAME, ImportTsv.class, "Import data in TSV format.");<a name="line.48"></a> -<span class="sourceLineNo">049</span> pgd.addClass(LoadIncrementalHFiles.NAME, LoadIncrementalHFiles.class,<a name="line.49"></a> -<span class="sourceLineNo">050</span> "Complete a bulk data load.");<a name="line.50"></a> -<span class="sourceLineNo">051</span> pgd.addClass(CopyTable.NAME, CopyTable.class,<a name="line.51"></a> -<span class="sourceLineNo">052</span> "Export a table from local cluster to peer cluster.");<a name="line.52"></a> -<span 
class="sourceLineNo">053</span> pgd.addClass(VerifyReplication.NAME, VerifyReplication.class, "Compare" +<a name="line.53"></a> -<span class="sourceLineNo">054</span> " data from tables in two different clusters. It" +<a name="line.54"></a> -<span class="sourceLineNo">055</span> " doesn't work for incrementColumnValues'd cells since" +<a name="line.55"></a> -<span class="sourceLineNo">056</span> " timestamp is changed after appending to WAL.");<a name="line.56"></a> -<span class="sourceLineNo">057</span> pgd.addClass(WALPlayer.NAME, WALPlayer.class, "Replay WAL files.");<a name="line.57"></a> -<span class="sourceLineNo">058</span> pgd.addClass(ExportSnapshot.NAME, ExportSnapshot.class, "Export" +<a name="line.58"></a> -<span class="sourceLineNo">059</span> " the specific snapshot to a given FileSystem.");<a name="line.59"></a> -<span class="sourceLineNo">060</span><a name="line.60"></a> -<span class="sourceLineNo">061</span> ProgramDriver.class.getMethod("driver", new Class [] {String[].class}).<a name="line.61"></a> -<span class="sourceLineNo">062</span> invoke(pgd, new Object[]{args});<a name="line.62"></a> -<span class="sourceLineNo">063</span> }<a name="line.63"></a> -<span class="sourceLineNo">064</span>}<a name="line.64"></a> +<span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles;<a name="line.26"></a> +<span class="sourceLineNo">027</span>import org.apache.hadoop.util.ProgramDriver;<a name="line.27"></a> +<span class="sourceLineNo">028</span><a name="line.28"></a> +<span class="sourceLineNo">029</span>/**<a name="line.29"></a> +<span class="sourceLineNo">030</span> * Driver for hbase mapreduce jobs. Select which to run by passing<a name="line.30"></a> +<span class="sourceLineNo">031</span> * name of job to this main.<a name="line.31"></a> +<span class="sourceLineNo">032</span> */<a name="line.32"></a> +<span class="sourceLineNo">033</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.33"></a> +<span class="sourceLineNo">034</span>@InterfaceStability.Stable<a name="line.34"></a> +<span class="sourceLineNo">035</span>public class Driver {<a name="line.35"></a> +<span class="sourceLineNo">036</span> /**<a name="line.36"></a> +<span class="sourceLineNo">037</span> * @param args<a name="line.37"></a> +<span class="sourceLineNo">038</span> * @throws Throwable<a name="line.38"></a> +<span class="sourceLineNo">039</span> */<a name="line.39"></a> +<span class="sourceLineNo">040</span> public static void main(String[] args) throws Throwable {<a name="line.40"></a> +<span class="sourceLineNo">041</span> ProgramDriver pgd = new ProgramDriver();<a name="line.41"></a> +<span class="sourceLineNo">042</span><a name="line.42"></a> +<span class="sourceLineNo">043</span> pgd.addClass(RowCounter.NAME, RowCounter.class,<a name="line.43"></a> +<span class="sourceLineNo">044</span> "Count rows in HBase table.");<a name="line.44"></a> +<span class="sourceLineNo">045</span> pgd.addClass(CellCounter.NAME, CellCounter.class,<a name="line.45"></a> +<span class="sourceLineNo">046</span> "Count cells in HBase table.");<a name="line.46"></a> +<span class="sourceLineNo">047</span> pgd.addClass(Export.NAME, Export.class, "Write table data to HDFS.");<a name="line.47"></a> +<span class="sourceLineNo">048</span> pgd.addClass(Import.NAME, Import.class, "Import data written by Export.");<a name="line.48"></a> +<span class="sourceLineNo">049</span> pgd.addClass(ImportTsv.NAME, ImportTsv.class, "Import data in TSV format.");<a name="line.49"></a> 
+<span class="sourceLineNo">050</span> pgd.addClass(LoadIncrementalHFiles.NAME, LoadIncrementalHFiles.class,<a name="line.50"></a> +<span class="sourceLineNo">051</span> "Complete a bulk data load.");<a name="line.51"></a> +<span class="sourceLineNo">052</span> pgd.addClass(CopyTable.NAME, CopyTable.class,<a name="line.52"></a> +<span class="sourceLineNo">053</span> "Export a table from local cluster to peer cluster.");<a name="line.53"></a> +<span class="sourceLineNo">054</span> pgd.addClass(VerifyReplication.NAME, VerifyReplication.class, "Compare" +<a name="line.54"></a> +<span class="sourceLineNo">055</span> " data from tables in two different clusters. It" +<a name="line.55"></a> +<span class="sourceLineNo">056</span> " doesn't work for incrementColumnValues'd cells since" +<a name="line.56"></a> +<span class="sourceLineNo">057</span> " timestamp is changed after appending to WAL.");<a name="line.57"></a> +<span class="sourceLineNo">058</span> pgd.addClass(WALPlayer.NAME, WALPlayer.class, "Replay WAL files.");<a name="line.58"></a> +<span class="sourceLineNo">059</span> pgd.addClass(ExportSnapshot.NAME, ExportSnapshot.class, "Export" +<a name="line.59"></a> +<span class="sourceLineNo">060</span> " the specific snapshot to a given FileSystem.");<a name="line.60"></a> +<span class="sourceLineNo">061</span><a name="line.61"></a> +<span class="sourceLineNo">062</span> ProgramDriver.class.getMethod("driver", new Class [] {String[].class}).<a name="line.62"></a> +<span class="sourceLineNo">063</span> invoke(pgd, new Object[]{args});<a name="line.63"></a> +<span class="sourceLineNo">064</span> }<a name="line.64"></a> +<span class="sourceLineNo">065</span>}<a name="line.65"></a> http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3b220124/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.html ---------------------------------------------------------------------- diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.html b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.html index ce5c663..68b5af1 100644 --- a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.html +++ b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.html @@ -50,7 +50,7 @@ <span class="sourceLineNo">042</span> *<a name="line.42"></a> <span class="sourceLineNo">043</span> * <p>This class is not suitable as partitioner creating hfiles<a name="line.43"></a> <span class="sourceLineNo">044</span> * for incremental bulk loads as region spread will likely change between time of<a name="line.44"></a> -<span class="sourceLineNo">045</span> * hfile creation and load time. See {@link LoadIncrementalHFiles}<a name="line.45"></a> +<span class="sourceLineNo">045</span> * hfile creation and load time. See {@link org.apache.hadoop.hbase.tool.LoadIncrementalHFiles}<a name="line.45"></a> <span class="sourceLineNo">046</span> * and <a href="http://hbase.apache.org/book.html#arch.bulk.load">Bulk Load</a>.</p><a name="line.46"></a> <span class="sourceLineNo">047</span> *<a name="line.47"></a> <span class="sourceLineNo">048</span> * @param <KEY> The type of the key.<a name="line.48"></a>