http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f17356a7/hbase-spark/project-summary.html
----------------------------------------------------------------------
diff --git a/hbase-spark/project-summary.html b/hbase-spark/project-summary.html
index e2fb2a0..a5287e0 100644
--- a/hbase-spark/project-summary.html
+++ b/hbase-spark/project-summary.html
@@ -1,5 +1,5 @@
 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" 
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd";>
-<!-- Generated by Apache Maven Doxia Site Renderer 1.6 at 2016-11-11 -->
+<!-- Generated by Apache Maven Doxia Site Renderer 1.6 at 2016-11-14 -->
 <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
@@ -10,7 +10,7 @@
       @import url("./css/site.css");
     </style>
     <link rel="stylesheet" href="./css/print.css" type="text/css" 
media="print" />
-    <meta name="Date-Revision-yyyymmdd" content="20161111" />
+    <meta name="Date-Revision-yyyymmdd" content="20161114" />
     <meta http-equiv="Content-Language" content="en" />
         
         </head>
@@ -27,7 +27,7 @@
             
                     
                 <div class="xleft">
-        <span id="publishDate">Last Published: 2016-11-11</span>
+        <span id="publishDate">Last Published: 2016-11-14</span>
                   &nbsp;| <span id="projectVersion">Version: 
2.0.0-SNAPSHOT</span>
                       </div>
             <div class="xright">                    <a href="./" title="Apache 
HBase - Spark">Apache HBase - Spark</a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f17356a7/hbase-spark/scaladocs/index.html
----------------------------------------------------------------------
diff --git a/hbase-spark/scaladocs/index.html b/hbase-spark/scaladocs/index.html
index 24f7a20..ebbaaba 100644
--- a/hbase-spark/scaladocs/index.html
+++ b/hbase-spark/scaladocs/index.html
@@ -40,7 +40,7 @@
             <ol class="templates"></ol>
             <ol class="packages"> <li class="pack" 
title="org.apache.hadoop.hbase.spark">
             <a class="tplshow" 
href="org/apache/hadoop/hbase/spark/package.html" 
target="template">org.apache.hadoop.hbase.spark</a>
-            <ol class="templates"><li 
title="org.apache.hadoop.hbase.spark.AndLogicExpression"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/AndLogicExpression.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">AndLogicExpression</span></a></li><li 
title="org.apache.hadoop.hbase.spark.AvroException"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/AvroException.html" target="template"><span 
class="class">(class)</span><span 
class="tplLink">AvroException</span></a></li><li 
title="org.apache.hadoop.hbase.spark.AvroSerdes"><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/AvroSerdes$.html" target="template"><span 
class="object">(object)</span><div class="placeholder"></div><span 
class="tplLink">AvroSerdes</span></a></li><li 
title="org.apache.hadoop.hbase.spark.BulkLoadPartitioner"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/BulkLoadPar
 titioner.html" target="template"><span class="class">(class)</span><span 
class="tplLink">BulkLoadPartitioner</span></a></li><li 
title="org.apache.hadoop.hbase.spark.ByteArrayComparable"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/ByteArrayComparable.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">ByteArrayComparable</span></a></li><li 
title="org.apache.hadoop.hbase.spark.ByteArrayWrapper"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/ByteArrayWrapper.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">ByteArrayWrapper</span></a></li><li 
title="org.apache.hadoop.hbase.spark.ColumnFamilyQualifierMapKeyWrapper"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/ColumnFamilyQualifierMapKeyWrapper.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">ColumnFamilyQualifierMapKeyWrapper</sp
 an></a></li><li title="org.apache.hadoop.hbase.spark.ColumnFilter"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/ColumnFilter.html" target="template"><span 
class="class">(class)</span><span 
class="tplLink">ColumnFilter</span></a></li><li 
title="org.apache.hadoop.hbase.spark.ColumnFilterCollection"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/ColumnFilterCollection.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">ColumnFilterCollection</span></a></li><li 
title="org.apache.hadoop.hbase.spark.CompareTrait"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/CompareTrait.html" target="template"><span 
class="trait">(trait)</span><span 
class="tplLink">CompareTrait</span></a></li><li 
title="org.apache.hadoop.hbase.spark.DefaultSource"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/DefaultSource.html" target=
 "template"><span class="class">(class)</span><span 
class="tplLink">DefaultSource</span></a></li><li 
title="org.apache.hadoop.hbase.spark.DefaultSourceStaticUtils"><a 
class="tplshow" 
href="org/apache/hadoop/hbase/spark/DefaultSourceStaticUtils$.html" 
target="template"><span class="object">(object)</span><div 
class="placeholder"></div><span 
class="tplLink">DefaultSourceStaticUtils</span></a></li><li 
title="org.apache.hadoop.hbase.spark.DynamicLogicExpression"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/DynamicLogicExpression.html" 
target="template"><span class="trait">(trait)</span><span 
class="tplLink">DynamicLogicExpression</span></a></li><li 
title="org.apache.hadoop.hbase.spark.DynamicLogicExpressionBuilder"><a 
class="tplshow" 
href="org/apache/hadoop/hbase/spark/DynamicLogicExpressionBuilder$.html" 
target="template"><span class="object">(object)</span><div 
class="placeholder"></div><span 
class="tplLink">DynamicLogicExpressionBuilder</span><
 /a></li><li title="org.apache.hadoop.hbase.spark.EqualLogicExpression"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/EqualLogicExpression.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">EqualLogicExpression</span></a></li><li 
title="org.apache.hadoop.hbase.spark.ExecutionRuleForUnitTesting"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/ExecutionRuleForUnitTesting.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">ExecutionRuleForUnitTesting</span></a></li><li 
title="org.apache.hadoop.hbase.spark.FamiliesQualifiersValues"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/FamiliesQualifiersValues.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">FamiliesQualifiersValues</span></a></li><li 
title="org.apache.hadoop.hbase.spark.FamilyHFileWriteOptions"><div 
class="placeholder"></div><
 a class="tplshow" 
href="org/apache/hadoop/hbase/spark/FamilyHFileWriteOptions.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">FamilyHFileWriteOptions</span></a></li><li 
title="org.apache.hadoop.hbase.spark.GreaterThanLogicExpression"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/GreaterThanLogicExpression.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">GreaterThanLogicExpression</span></a></li><li 
title="org.apache.hadoop.hbase.spark.GreaterThanOrEqualLogicExpression"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/GreaterThanOrEqualLogicExpression.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">GreaterThanOrEqualLogicExpression</span></a></li><li 
title="org.apache.hadoop.hbase.spark.HBaseConnectionKey"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/HBaseConnectionKey.html"
  target="template"><span class="class">(class)</span><span 
class="tplLink">HBaseConnectionKey</span></a></li><li 
title="org.apache.hadoop.hbase.spark.HBaseContext"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/HBaseContext.html" target="template"><span 
class="class">(class)</span><span 
class="tplLink">HBaseContext</span></a></li><li 
title="org.apache.hadoop.hbase.spark.HBaseDStreamFunctions"><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/HBaseDStreamFunctions$.html" 
target="template"><span class="object">(object)</span><div 
class="placeholder"></div><span 
class="tplLink">HBaseDStreamFunctions</span></a></li><li 
title="org.apache.hadoop.hbase.spark.HBaseRDDFunctions"><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/HBaseRDDFunctions$.html" 
target="template"><span class="object">(object)</span><div 
class="placeholder"></div><span 
class="tplLink">HBaseRDDFunctions</span></a></li><li 
title="org.apache.hadoop.hbase.spark.HBaseRelation
 "><div class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/HBaseRelation.html" target="template"><span 
class="case class">(case class)</span><span 
class="tplLink">HBaseRelation</span></a></li><li 
title="org.apache.hadoop.hbase.spark.IsNullLogicExpression"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/IsNullLogicExpression.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">IsNullLogicExpression</span></a></li><li 
title="org.apache.hadoop.hbase.spark.JavaHBaseContext"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/JavaHBaseContext.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">JavaHBaseContext</span></a></li><li 
title="org.apache.hadoop.hbase.spark.KeyFamilyQualifier"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/KeyFamilyQualifier.html" 
target="template"><span class="class">(cl
 ass)</span><span class="tplLink">KeyFamilyQualifier</span></a></li><li 
title="org.apache.hadoop.hbase.spark.LatestHBaseContextCache"><a 
class="tplshow" 
href="org/apache/hadoop/hbase/spark/LatestHBaseContextCache$.html" 
target="template"><span class="object">(object)</span><div 
class="placeholder"></div><span 
class="tplLink">LatestHBaseContextCache</span></a></li><li 
title="org.apache.hadoop.hbase.spark.LessThanLogicExpression"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/LessThanLogicExpression.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">LessThanLogicExpression</span></a></li><li 
title="org.apache.hadoop.hbase.spark.LessThanOrEqualLogicExpression"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/LessThanOrEqualLogicExpression.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">LessThanOrEqualLogicExpression</span></a></li><li 
title="org.apache
 .hadoop.hbase.spark.NewHBaseRDD"><div class="placeholder"></div><a 
class="tplshow" href="org/apache/hadoop/hbase/spark/NewHBaseRDD.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">NewHBaseRDD</span></a></li><li 
title="org.apache.hadoop.hbase.spark.OrLogicExpression"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/OrLogicExpression.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">OrLogicExpression</span></a></li><li 
title="org.apache.hadoop.hbase.spark.PassThroughLogicExpression"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/PassThroughLogicExpression.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">PassThroughLogicExpression</span></a></li><li 
title="org.apache.hadoop.hbase.spark.RowKeyFilter"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/RowKeyFilter.html" target="template">
 <span class="class">(class)</span><span 
class="tplLink">RowKeyFilter</span></a></li><li 
title="org.apache.hadoop.hbase.spark.ScanRange"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/ScanRange.html" target="template"><span 
class="class">(class)</span><span class="tplLink">ScanRange</span></a></li><li 
title="org.apache.hadoop.hbase.spark.SchemaConversionException"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/SchemaConversionException.html" 
target="template"><span class="case class">(case class)</span><span 
class="tplLink">SchemaConversionException</span></a></li><li 
title="org.apache.hadoop.hbase.spark.SchemaConverters"><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/SchemaConverters$.html" 
target="template"><span class="object">(object)</span><div 
class="placeholder"></div><span 
class="tplLink">SchemaConverters</span></a></li><li 
title="org.apache.hadoop.hbase.spark.SparkSQLPushDownFilter"><div cl
 ass="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">SparkSQLPushDownFilter</span></a></li></ol>
+            <ol class="templates"><li 
title="org.apache.hadoop.hbase.spark.AndLogicExpression"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/AndLogicExpression.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">AndLogicExpression</span></a></li><li 
title="org.apache.hadoop.hbase.spark.AvroException"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/AvroException.html" target="template"><span 
class="class">(class)</span><span 
class="tplLink">AvroException</span></a></li><li 
title="org.apache.hadoop.hbase.spark.AvroSerdes"><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/AvroSerdes$.html" target="template"><span 
class="object">(object)</span><div class="placeholder"></div><span 
class="tplLink">AvroSerdes</span></a></li><li 
title="org.apache.hadoop.hbase.spark.BulkLoadPartitioner"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/BulkLoadPar
 titioner.html" target="template"><span class="class">(class)</span><span 
class="tplLink">BulkLoadPartitioner</span></a></li><li 
title="org.apache.hadoop.hbase.spark.ByteArrayComparable"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/ByteArrayComparable.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">ByteArrayComparable</span></a></li><li 
title="org.apache.hadoop.hbase.spark.ByteArrayWrapper"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/ByteArrayWrapper.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">ByteArrayWrapper</span></a></li><li 
title="org.apache.hadoop.hbase.spark.ColumnFamilyQualifierMapKeyWrapper"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/ColumnFamilyQualifierMapKeyWrapper.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">ColumnFamilyQualifierMapKeyWrapper</sp
 an></a></li><li title="org.apache.hadoop.hbase.spark.ColumnFilter"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/ColumnFilter.html" target="template"><span 
class="class">(class)</span><span 
class="tplLink">ColumnFilter</span></a></li><li 
title="org.apache.hadoop.hbase.spark.ColumnFilterCollection"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/ColumnFilterCollection.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">ColumnFilterCollection</span></a></li><li 
title="org.apache.hadoop.hbase.spark.CompareTrait"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/CompareTrait.html" target="template"><span 
class="trait">(trait)</span><span 
class="tplLink">CompareTrait</span></a></li><li 
title="org.apache.hadoop.hbase.spark.DefaultSource"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/DefaultSource.html" target=
 "template"><span class="class">(class)</span><span 
class="tplLink">DefaultSource</span></a></li><li 
title="org.apache.hadoop.hbase.spark.DefaultSourceStaticUtils"><a 
class="tplshow" 
href="org/apache/hadoop/hbase/spark/DefaultSourceStaticUtils$.html" 
target="template"><span class="object">(object)</span><div 
class="placeholder"></div><span 
class="tplLink">DefaultSourceStaticUtils</span></a></li><li 
title="org.apache.hadoop.hbase.spark.DynamicLogicExpression"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/DynamicLogicExpression.html" 
target="template"><span class="trait">(trait)</span><span 
class="tplLink">DynamicLogicExpression</span></a></li><li 
title="org.apache.hadoop.hbase.spark.DynamicLogicExpressionBuilder"><a 
class="tplshow" 
href="org/apache/hadoop/hbase/spark/DynamicLogicExpressionBuilder$.html" 
target="template"><span class="object">(object)</span><div 
class="placeholder"></div><span 
class="tplLink">DynamicLogicExpressionBuilder</span><
 /a></li><li title="org.apache.hadoop.hbase.spark.EqualLogicExpression"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/EqualLogicExpression.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">EqualLogicExpression</span></a></li><li 
title="org.apache.hadoop.hbase.spark.ExecutionRuleForUnitTesting"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/ExecutionRuleForUnitTesting.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">ExecutionRuleForUnitTesting</span></a></li><li 
title="org.apache.hadoop.hbase.spark.FamiliesQualifiersValues"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/FamiliesQualifiersValues.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">FamiliesQualifiersValues</span></a></li><li 
title="org.apache.hadoop.hbase.spark.FamilyHFileWriteOptions"><div 
class="placeholder"></div><
 a class="tplshow" 
href="org/apache/hadoop/hbase/spark/FamilyHFileWriteOptions.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">FamilyHFileWriteOptions</span></a></li><li 
title="org.apache.hadoop.hbase.spark.GreaterThanLogicExpression"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/GreaterThanLogicExpression.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">GreaterThanLogicExpression</span></a></li><li 
title="org.apache.hadoop.hbase.spark.GreaterThanOrEqualLogicExpression"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/GreaterThanOrEqualLogicExpression.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">GreaterThanOrEqualLogicExpression</span></a></li><li 
title="org.apache.hadoop.hbase.spark.HBaseConnectionCacheStat"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/HBaseConnectionCac
 heStat.html" target="template"><span class="case class">(case 
class)</span><span class="tplLink">HBaseConnectionCacheStat</span></a></li><li 
title="org.apache.hadoop.hbase.spark.HBaseConnectionKey"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/HBaseConnectionKey.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">HBaseConnectionKey</span></a></li><li 
title="org.apache.hadoop.hbase.spark.HBaseContext"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/HBaseContext.html" target="template"><span 
class="class">(class)</span><span 
class="tplLink">HBaseContext</span></a></li><li 
title="org.apache.hadoop.hbase.spark.HBaseDStreamFunctions"><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/HBaseDStreamFunctions$.html" 
target="template"><span class="object">(object)</span><div 
class="placeholder"></div><span 
class="tplLink">HBaseDStreamFunctions</span></a></li><li title="org.apache.hado
 op.hbase.spark.HBaseRDDFunctions"><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/HBaseRDDFunctions$.html" 
target="template"><span class="object">(object)</span><div 
class="placeholder"></div><span 
class="tplLink">HBaseRDDFunctions</span></a></li><li 
title="org.apache.hadoop.hbase.spark.HBaseRelation"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/HBaseRelation.html" target="template"><span 
class="case class">(case class)</span><span 
class="tplLink">HBaseRelation</span></a></li><li 
title="org.apache.hadoop.hbase.spark.IsNullLogicExpression"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/IsNullLogicExpression.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">IsNullLogicExpression</span></a></li><li 
title="org.apache.hadoop.hbase.spark.JavaHBaseContext"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/JavaHBaseContext.html" target="tem
 plate"><span class="class">(class)</span><span 
class="tplLink">JavaHBaseContext</span></a></li><li 
title="org.apache.hadoop.hbase.spark.KeyFamilyQualifier"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/KeyFamilyQualifier.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">KeyFamilyQualifier</span></a></li><li 
title="org.apache.hadoop.hbase.spark.LatestHBaseContextCache"><a 
class="tplshow" 
href="org/apache/hadoop/hbase/spark/LatestHBaseContextCache$.html" 
target="template"><span class="object">(object)</span><div 
class="placeholder"></div><span 
class="tplLink">LatestHBaseContextCache</span></a></li><li 
title="org.apache.hadoop.hbase.spark.LessThanLogicExpression"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/LessThanLogicExpression.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">LessThanLogicExpression</span></a></li><li 
title="org.apache.hadoop.
 hbase.spark.LessThanOrEqualLogicExpression"><div class="placeholder"></div><a 
class="tplshow" 
href="org/apache/hadoop/hbase/spark/LessThanOrEqualLogicExpression.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">LessThanOrEqualLogicExpression</span></a></li><li 
title="org.apache.hadoop.hbase.spark.NewHBaseRDD"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/NewHBaseRDD.html" target="template"><span 
class="class">(class)</span><span 
class="tplLink">NewHBaseRDD</span></a></li><li 
title="org.apache.hadoop.hbase.spark.OrLogicExpression"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/OrLogicExpression.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">OrLogicExpression</span></a></li><li 
title="org.apache.hadoop.hbase.spark.PassThroughLogicExpression"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/PassThroughLogicExp
 ression.html" target="template"><span class="class">(class)</span><span 
class="tplLink">PassThroughLogicExpression</span></a></li><li 
title="org.apache.hadoop.hbase.spark.RowKeyFilter"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/RowKeyFilter.html" target="template"><span 
class="class">(class)</span><span 
class="tplLink">RowKeyFilter</span></a></li><li 
title="org.apache.hadoop.hbase.spark.ScanRange"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/ScanRange.html" target="template"><span 
class="class">(class)</span><span class="tplLink">ScanRange</span></a></li><li 
title="org.apache.hadoop.hbase.spark.SchemaConversionException"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/SchemaConversionException.html" 
target="template"><span class="case class">(case class)</span><span 
class="tplLink">SchemaConversionException</span></a></li><li 
title="org.apache.hadoop.hbase.spark.
 SchemaConverters"><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/SchemaConverters$.html" 
target="template"><span class="object">(object)</span><div 
class="placeholder"></div><span 
class="tplLink">SchemaConverters</span></a></li><li 
title="org.apache.hadoop.hbase.spark.SparkSQLPushDownFilter"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">SparkSQLPushDownFilter</span></a></li></ol>
             <ol class="packages"> <li class="pack" 
title="org.apache.hadoop.hbase.spark.datasources">
             <a class="tplshow" 
href="org/apache/hadoop/hbase/spark/datasources/package.html" 
target="template">org.apache.hadoop.hbase.spark.datasources</a>
             <ol class="templates"><li 
title="org.apache.hadoop.hbase.spark.datasources.Bound"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/datasources/Bound.html" 
target="template"><span class="case class">(case class)</span><span 
class="tplLink">Bound</span></a></li><li 
title="org.apache.hadoop.hbase.spark.datasources.BoundRange"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/datasources/BoundRange.html" 
target="template"><span class="case class">(case class)</span><span 
class="tplLink">BoundRange</span></a></li><li 
title="org.apache.hadoop.hbase.spark.datasources.BoundRanges"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/datasources/BoundRanges.html" 
target="template"><span class="case class">(case class)</span><span 
class="tplLink">BoundRanges</span></a></li><li 
title="org.apache.hadoop.hbase.spark.datasources.BytesEncoder"><div 
class="placeholder"></div><a clas
 s="tplshow" href="org/apache/hadoop/hbase/spark/datasources/BytesEncoder.html" 
target="template"><span class="trait">(trait)</span><span 
class="tplLink">BytesEncoder</span></a></li><li 
title="org.apache.hadoop.hbase.spark.datasources.DoubleSerDes"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/datasources/DoubleSerDes.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">DoubleSerDes</span></a></li><li 
title="org.apache.hadoop.hbase.spark.datasources.GetResource"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/datasources/GetResource.html" 
target="template"><span class="case class">(case class)</span><span 
class="tplLink">GetResource</span></a></li><li 
title="org.apache.hadoop.hbase.spark.datasources.HBaseResources"><a 
class="tplshow" 
href="org/apache/hadoop/hbase/spark/datasources/HBaseResources$.html" 
target="template"><span class="object">(object)</span><div class="placeholder">
 </div><span class="tplLink">HBaseResources</span></a></li><li 
title="org.apache.hadoop.hbase.spark.datasources.HBaseSparkConf"><a 
class="tplshow" 
href="org/apache/hadoop/hbase/spark/datasources/HBaseSparkConf$.html" 
target="template"><span class="object">(object)</span><div 
class="placeholder"></div><span 
class="tplLink">HBaseSparkConf</span></a></li><li 
title="org.apache.hadoop.hbase.spark.datasources.HBaseTableScanRDD"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/datasources/HBaseTableScanRDD.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">HBaseTableScanRDD</span></a></li><li 
title="org.apache.hadoop.hbase.spark.datasources.JavaBytesEncoder"><a 
class="tplshow" 
href="org/apache/hadoop/hbase/spark/datasources/JavaBytesEncoder$.html" 
target="template"><span class="object">(object)</span><div 
class="placeholder"></div><span 
class="tplLink">JavaBytesEncoder</span></a></li><li 
title="org.apache.hadoop.hbase.spark.d
 atasources.NaiveEncoder"><div class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/datasources/NaiveEncoder.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">NaiveEncoder</span></a></li><li 
title="org.apache.hadoop.hbase.spark.datasources.Points"><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/datasources/Points$.html" 
target="template"><span class="object">(object)</span><div 
class="placeholder"></div><span class="tplLink">Points</span></a></li><li 
title="org.apache.hadoop.hbase.spark.datasources.Range"><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/datasources/Range$.html" 
target="template"><span class="object">(object)</span></a><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/datasources/Range.html" 
target="template"><span class="case class">(case class)</span><span 
class="tplLink">Range</span></a></li><li 
title="org.apache.hadoop.hbase.spark.datasources.Ranges"><a class="tplshow" 
href="org/apache/had
 oop/hbase/spark/datasources/Ranges$.html" target="template"><span 
class="object">(object)</span><div class="placeholder"></div><span 
class="tplLink">Ranges</span></a></li><li 
title="org.apache.hadoop.hbase.spark.datasources.RDDResources"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/datasources/RDDResources.html" 
target="template"><span class="case class">(case class)</span><span 
class="tplLink">RDDResources</span></a></li><li 
title="org.apache.hadoop.hbase.spark.datasources.ReferencedResource"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/datasources/ReferencedResource.html" 
target="template"><span class="trait">(trait)</span><span 
class="tplLink">ReferencedResource</span></a></li><li 
title="org.apache.hadoop.hbase.spark.datasources.RegionResource"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/datasources/RegionResource.html" 
target="template"><span class="case cl
 ass">(case class)</span><span 
class="tplLink">RegionResource</span></a></li><li 
title="org.apache.hadoop.hbase.spark.datasources.Resource"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/datasources/Resource.html" 
target="template"><span class="trait">(trait)</span><span 
class="tplLink">Resource</span></a></li><li 
title="org.apache.hadoop.hbase.spark.datasources.ScanResource"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/datasources/ScanResource.html" 
target="template"><span class="case class">(case class)</span><span 
class="tplLink">ScanResource</span></a></li><li 
title="org.apache.hadoop.hbase.spark.datasources.SerDes"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/datasources/SerDes.html" 
target="template"><span class="trait">(trait)</span><span 
class="tplLink">SerDes</span></a></li><li 
title="org.apache.hadoop.hbase.spark.datasources.SerializableConfiguration"><di
 v class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/datasources/SerializableConfiguration.html" 
target="template"><span class="class">(class)</span><span 
class="tplLink">SerializableConfiguration</span></a></li><li 
title="org.apache.hadoop.hbase.spark.datasources.SerializedFilter"><a 
class="tplshow" 
href="org/apache/hadoop/hbase/spark/datasources/SerializedFilter$.html" 
target="template"><span class="object">(object)</span></a><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/datasources/SerializedFilter.html" 
target="template"><span class="case class">(case class)</span><span 
class="tplLink">SerializedFilter</span></a></li><li 
title="org.apache.hadoop.hbase.spark.datasources.TableResource"><div 
class="placeholder"></div><a class="tplshow" 
href="org/apache/hadoop/hbase/spark/datasources/TableResource.html" 
target="template"><span class="case class">(case class)</span><span 
class="tplLink">TableResource</span></a></li></ol>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f17356a7/hbase-spark/scaladocs/index.js
----------------------------------------------------------------------
diff --git a/hbase-spark/scaladocs/index.js b/hbase-spark/scaladocs/index.js
index 324935b..f370baa 100644
--- a/hbase-spark/scaladocs/index.js
+++ b/hbase-spark/scaladocs/index.js
@@ -1 +1 @@
-Index.PACKAGES = {"org.apache.hadoop.hbase.spark.datasources" : [{"case class" 
: "org\/apache\/hadoop\/hbase\/spark\/datasources\/Bound.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.Bound"}, {"case class" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/BoundRange.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.BoundRange"}, {"case class" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/BoundRanges.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.BoundRanges"}, {"trait" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/BytesEncoder.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.BytesEncoder"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/DoubleSerDes.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.DoubleSerDes"}, {"case class" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/GetResource.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.GetResource"}, {"object" : 
"org\/apache\/hadoop\/hbase\/spark
 \/datasources\/HBaseResources$.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.HBaseResources"}, {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/HBaseSparkConf$.html", "name" 
: "org.apache.hadoop.hbase.spark.datasources.HBaseSparkConf"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/HBaseTableScanRDD.html", 
"name" : "org.apache.hadoop.hbase.spark.datasources.HBaseTableScanRDD"}, 
{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/JavaBytesEncoder$.html", 
"name" : "org.apache.hadoop.hbase.spark.datasources.JavaBytesEncoder"}, 
{"class" : "org\/apache\/hadoop\/hbase\/spark\/datasources\/NaiveEncoder.html", 
"name" : "org.apache.hadoop.hbase.spark.datasources.NaiveEncoder"}, {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/Points$.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.Points"}, {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/Range$.html", "case class" : 
"org\/apache\/hadoop\/hbase\/spark\/datasou
 rces\/Range.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.Range"}, {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/Ranges$.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.Ranges"}, {"case class" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/RDDResources.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.RDDResources"}, {"trait" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/ReferencedResource.html", 
"name" : "org.apache.hadoop.hbase.spark.datasources.ReferencedResource"}, 
{"case class" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/RegionResource.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.RegionResource"}, {"trait" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/Resource.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.Resource"}, {"case class" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/ScanResource.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.ScanResource"}, {"trait" : "org\
 /apache\/hadoop\/hbase\/spark\/datasources\/SerDes.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.SerDes"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/SerializableConfiguration.html",
 "name" : 
"org.apache.hadoop.hbase.spark.datasources.SerializableConfiguration"}, 
{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/SerializedFilter$.html", "case 
class" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/SerializedFilter.html", "name" 
: "org.apache.hadoop.hbase.spark.datasources.SerializedFilter"}, {"case class" 
: "org\/apache\/hadoop\/hbase\/spark\/datasources\/TableResource.html", "name" 
: "org.apache.hadoop.hbase.spark.datasources.TableResource"}], 
"org.apache.hadoop.hbase.spark.example" : [], "org.apache.hadoop.hbase.spark" : 
[{"class" : "org\/apache\/hadoop\/hbase\/spark\/AndLogicExpression.html", 
"name" : "org.apache.hadoop.hbase.spark.AndLogicExpression"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/AvroException.html", "name" : "org.apa
 che.hadoop.hbase.spark.AvroException"}, {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/AvroSerdes$.html", "name" : 
"org.apache.hadoop.hbase.spark.AvroSerdes"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/BulkLoadPartitioner.html", "name" : 
"org.apache.hadoop.hbase.spark.BulkLoadPartitioner"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/ByteArrayComparable.html", "name" : 
"org.apache.hadoop.hbase.spark.ByteArrayComparable"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/ByteArrayWrapper.html", "name" : 
"org.apache.hadoop.hbase.spark.ByteArrayWrapper"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/ColumnFamilyQualifierMapKeyWrapper.html", 
"name" : "org.apache.hadoop.hbase.spark.ColumnFamilyQualifierMapKeyWrapper"}, 
{"class" : "org\/apache\/hadoop\/hbase\/spark\/ColumnFilter.html", "name" : 
"org.apache.hadoop.hbase.spark.ColumnFilter"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/ColumnFilterCollection.html", "name" : 
"org.apache.hadoop.hbase.spark.ColumnFilterCollection"}
 , {"trait" : "org\/apache\/hadoop\/hbase\/spark\/CompareTrait.html", "name" : 
"org.apache.hadoop.hbase.spark.CompareTrait"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/DefaultSource.html", "name" : 
"org.apache.hadoop.hbase.spark.DefaultSource"}, {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/DefaultSourceStaticUtils$.html", "name" : 
"org.apache.hadoop.hbase.spark.DefaultSourceStaticUtils"}, {"trait" : 
"org\/apache\/hadoop\/hbase\/spark\/DynamicLogicExpression.html", "name" : 
"org.apache.hadoop.hbase.spark.DynamicLogicExpression"}, {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/DynamicLogicExpressionBuilder$.html", 
"name" : "org.apache.hadoop.hbase.spark.DynamicLogicExpressionBuilder"}, 
{"class" : "org\/apache\/hadoop\/hbase\/spark\/EqualLogicExpression.html", 
"name" : "org.apache.hadoop.hbase.spark.EqualLogicExpression"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/ExecutionRuleForUnitTesting.html", "name" : 
"org.apache.hadoop.hbase.spark.ExecutionRuleForUnitTesting"}, {"cl
 ass" : "org\/apache\/hadoop\/hbase\/spark\/FamiliesQualifiersValues.html", 
"name" : "org.apache.hadoop.hbase.spark.FamiliesQualifiersValues"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/FamilyHFileWriteOptions.html", "name" : 
"org.apache.hadoop.hbase.spark.FamilyHFileWriteOptions"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/GreaterThanLogicExpression.html", "name" : 
"org.apache.hadoop.hbase.spark.GreaterThanLogicExpression"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/GreaterThanOrEqualLogicExpression.html", 
"name" : "org.apache.hadoop.hbase.spark.GreaterThanOrEqualLogicExpression"}, 
{"class" : "org\/apache\/hadoop\/hbase\/spark\/HBaseConnectionKey.html", "name" 
: "org.apache.hadoop.hbase.spark.HBaseConnectionKey"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/HBaseContext.html", "name" : 
"org.apache.hadoop.hbase.spark.HBaseContext"}, {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/HBaseDStreamFunctions$.html", "name" : 
"org.apache.hadoop.hbase.spark.HBaseDStreamFunct
 ions"}, {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/HBaseRDDFunctions$.html", "name" : 
"org.apache.hadoop.hbase.spark.HBaseRDDFunctions"}, {"case class" : 
"org\/apache\/hadoop\/hbase\/spark\/HBaseRelation.html", "name" : 
"org.apache.hadoop.hbase.spark.HBaseRelation"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/IsNullLogicExpression.html", "name" : 
"org.apache.hadoop.hbase.spark.IsNullLogicExpression"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/JavaHBaseContext.html", "name" : 
"org.apache.hadoop.hbase.spark.JavaHBaseContext"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/KeyFamilyQualifier.html", "name" : 
"org.apache.hadoop.hbase.spark.KeyFamilyQualifier"}, {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/LatestHBaseContextCache$.html", "name" : 
"org.apache.hadoop.hbase.spark.LatestHBaseContextCache"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/LessThanLogicExpression.html", "name" : 
"org.apache.hadoop.hbase.spark.LessThanLogicExpression"}, {"class" : 
"org\/apache\/
 hadoop\/hbase\/spark\/LessThanOrEqualLogicExpression.html", "name" : 
"org.apache.hadoop.hbase.spark.LessThanOrEqualLogicExpression"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/NewHBaseRDD.html", "name" : 
"org.apache.hadoop.hbase.spark.NewHBaseRDD"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/OrLogicExpression.html", "name" : 
"org.apache.hadoop.hbase.spark.OrLogicExpression"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/PassThroughLogicExpression.html", "name" : 
"org.apache.hadoop.hbase.spark.PassThroughLogicExpression"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/RowKeyFilter.html", "name" : 
"org.apache.hadoop.hbase.spark.RowKeyFilter"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/ScanRange.html", "name" : 
"org.apache.hadoop.hbase.spark.ScanRange"}, {"case class" : 
"org\/apache\/hadoop\/hbase\/spark\/SchemaConversionException.html", "name" : 
"org.apache.hadoop.hbase.spark.SchemaConversionException"}, {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/SchemaConverters
 $.html", "name" : "org.apache.hadoop.hbase.spark.SchemaConverters"}, {"class" 
: "org\/apache\/hadoop\/hbase\/spark\/SparkSQLPushDownFilter.html", "name" : 
"org.apache.hadoop.hbase.spark.SparkSQLPushDownFilter"}], 
"org.apache.hadoop.hbase.spark.example.datasources" : [{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/datasources\/AvroHBaseRecord$.html",
 "case class" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/datasources\/AvroHBaseRecord.html",
 "name" : "org.apache.hadoop.hbase.spark.example.datasources.AvroHBaseRecord"}, 
{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/datasources\/AvroSource$.html", 
"name" : "org.apache.hadoop.hbase.spark.example.datasources.AvroSource"}, 
{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/datasources\/DataType$.html", 
"name" : "org.apache.hadoop.hbase.spark.example.datasources.DataType"}, 
{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/datasources\/HBaseRecord$.html", 
"case class" : "org\/apache\/hadoop\/hbase\/spar
 k\/example\/datasources\/HBaseRecord.html", "name" : 
"org.apache.hadoop.hbase.spark.example.datasources.HBaseRecord"}, {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/datasources\/HBaseSource$.html", 
"name" : "org.apache.hadoop.hbase.spark.example.datasources.HBaseSource"}, 
{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/datasources\/IntKeyRecord$.html", 
"case class" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/datasources\/IntKeyRecord.html", 
"name" : "org.apache.hadoop.hbase.spark.example.datasources.IntKeyRecord"}, 
{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/datasources\/UserCustomizedSampleException$.html",
 "class" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/datasources\/UserCustomizedSampleException.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.datasources.UserCustomizedSampleException"}],
 "org.apache.hadoop" : [], "org.apache.spark" : [], "org.apache" : [], 
"org.apache.spark.sql.datasources.hbase" : [{"object" : "org\/apache\/spark\
 /sql\/datasources\/hbase\/DataTypeParserWrapper$.html", "name" : 
"org.apache.spark.sql.datasources.hbase.DataTypeParserWrapper"}, {"case class" 
: "org\/apache\/spark\/sql\/datasources\/hbase\/Field.html", "name" : 
"org.apache.spark.sql.datasources.hbase.Field"}, {"object" : 
"org\/apache\/spark\/sql\/datasources\/hbase\/HBaseTableCatalog$.html", "case 
class" : "org\/apache\/spark\/sql\/datasources\/hbase\/HBaseTableCatalog.html", 
"name" : "org.apache.spark.sql.datasources.hbase.HBaseTableCatalog"}, {"case 
class" : "org\/apache\/spark\/sql\/datasources\/hbase\/RowKey.html", "name" : 
"org.apache.spark.sql.datasources.hbase.RowKey"}, {"case class" : 
"org\/apache\/spark\/sql\/datasources\/hbase\/SchemaMap.html", "name" : 
"org.apache.spark.sql.datasources.hbase.SchemaMap"}, {"case class" : 
"org\/apache\/spark\/sql\/datasources\/hbase\/SchemaQualifierDefinition.html", 
"name" : "org.apache.spark.sql.datasources.hbase.SchemaQualifierDefinition"}, 
{"object" : "org\/apache\/spark\/sql\/datasou
 rces\/hbase\/Utils$.html", "name" : 
"org.apache.spark.sql.datasources.hbase.Utils"}], 
"org.apache.spark.sql.datasources" : [], 
"org.apache.hadoop.hbase.spark.example.hbasecontext" : [{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/hbasecontext\/HBaseBulkDeleteExample$.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.hbasecontext.HBaseBulkDeleteExample"}, 
{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/hbasecontext\/HBaseBulkGetExample$.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.hbasecontext.HBaseBulkGetExample"}, 
{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/hbasecontext\/HBaseBulkPutExample$.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.hbasecontext.HBaseBulkPutExample"}, 
{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/hbasecontext\/HBaseBulkPutExampleFromFile$.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.hbasecontext.HBaseBulkPutExampleFromFile"},
 {"object" : "org\/apache\/hadoop\/hbase\/spark\/example\/hba
 secontext\/HBaseBulkPutTimestampExample$.html", "name" : 
"org.apache.hadoop.hbase.spark.example.hbasecontext.HBaseBulkPutTimestampExample"},
 {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/hbasecontext\/HBaseDistributedScanExample$.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.hbasecontext.HBaseDistributedScanExample"},
 {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/hbasecontext\/HBaseStreamingBulkPutExample$.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.hbasecontext.HBaseStreamingBulkPutExample"},
 {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/hbasecontext\/JavaHBaseBulkDeleteExample.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.hbasecontext.JavaHBaseBulkDeleteExample"},
 {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/hbasecontext\/JavaHBaseBulkGetExample.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.hbasecontext.JavaHBaseBulkGetExample"}, 
{"class" : "org\/apache\/hadoop\/hbase\/spark\/example\/hbasecontext\/Ja
 vaHBaseBulkPutExample.html", "name" : 
"org.apache.hadoop.hbase.spark.example.hbasecontext.JavaHBaseBulkPutExample"}, 
{"class" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/hbasecontext\/JavaHBaseDistributedScan.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.hbasecontext.JavaHBaseDistributedScan"}, 
{"class" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/hbasecontext\/JavaHBaseMapGetPutExample.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.hbasecontext.JavaHBaseMapGetPutExample"},
 {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/hbasecontext\/JavaHBaseStreamingBulkPutExample.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.hbasecontext.JavaHBaseStreamingBulkPutExample"}],
 "org.apache.hadoop.hbase.spark.example.rdd" : [{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/rdd\/HBaseBulkDeleteExample$.html",
 "name" : "org.apache.hadoop.hbase.spark.example.rdd.HBaseBulkDeleteExample"}, 
{"object" : "org\/apache\/hadoop\/hbase\/spark\/example\/rdd\/HBaseBul
 kGetExample$.html", "name" : 
"org.apache.hadoop.hbase.spark.example.rdd.HBaseBulkGetExample"}, {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/rdd\/HBaseBulkPutExample$.html", 
"name" : "org.apache.hadoop.hbase.spark.example.rdd.HBaseBulkPutExample"}, 
{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/rdd\/HBaseForeachPartitionExample$.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.rdd.HBaseForeachPartitionExample"}, 
{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/rdd\/HBaseMapPartitionExample$.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.rdd.HBaseMapPartitionExample"}], 
"org.apache.hadoop.hbase" : [], "org.apache.spark.sql" : [], "org" : [], 
"org.apache.hadoop.hbase.spark.protobuf" : [], 
"org.apache.hadoop.hbase.spark.protobuf.generated" : [{"class" : 
"org\/apache\/hadoop\/hbase\/spark\/protobuf\/generated\/SparkFilterProtos.html",
 "name" : 
"org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos"}]};
\ No newline at end of file
+Index.PACKAGES = {"org.apache.hadoop.hbase.spark.datasources" : [{"case class" 
: "org\/apache\/hadoop\/hbase\/spark\/datasources\/Bound.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.Bound"}, {"case class" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/BoundRange.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.BoundRange"}, {"case class" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/BoundRanges.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.BoundRanges"}, {"trait" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/BytesEncoder.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.BytesEncoder"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/DoubleSerDes.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.DoubleSerDes"}, {"case class" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/GetResource.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.GetResource"}, {"object" : 
"org\/apache\/hadoop\/hbase\/spark
 \/datasources\/HBaseResources$.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.HBaseResources"}, {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/HBaseSparkConf$.html", "name" 
: "org.apache.hadoop.hbase.spark.datasources.HBaseSparkConf"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/HBaseTableScanRDD.html", 
"name" : "org.apache.hadoop.hbase.spark.datasources.HBaseTableScanRDD"}, 
{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/JavaBytesEncoder$.html", 
"name" : "org.apache.hadoop.hbase.spark.datasources.JavaBytesEncoder"}, 
{"class" : "org\/apache\/hadoop\/hbase\/spark\/datasources\/NaiveEncoder.html", 
"name" : "org.apache.hadoop.hbase.spark.datasources.NaiveEncoder"}, {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/Points$.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.Points"}, {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/Range$.html", "case class" : 
"org\/apache\/hadoop\/hbase\/spark\/datasou
 rces\/Range.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.Range"}, {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/Ranges$.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.Ranges"}, {"case class" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/RDDResources.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.RDDResources"}, {"trait" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/ReferencedResource.html", 
"name" : "org.apache.hadoop.hbase.spark.datasources.ReferencedResource"}, 
{"case class" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/RegionResource.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.RegionResource"}, {"trait" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/Resource.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.Resource"}, {"case class" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/ScanResource.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.ScanResource"}, {"trait" : "org\
 /apache\/hadoop\/hbase\/spark\/datasources\/SerDes.html", "name" : 
"org.apache.hadoop.hbase.spark.datasources.SerDes"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/SerializableConfiguration.html",
 "name" : 
"org.apache.hadoop.hbase.spark.datasources.SerializableConfiguration"}, 
{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/SerializedFilter$.html", "case 
class" : 
"org\/apache\/hadoop\/hbase\/spark\/datasources\/SerializedFilter.html", "name" 
: "org.apache.hadoop.hbase.spark.datasources.SerializedFilter"}, {"case class" 
: "org\/apache\/hadoop\/hbase\/spark\/datasources\/TableResource.html", "name" 
: "org.apache.hadoop.hbase.spark.datasources.TableResource"}], 
"org.apache.hadoop.hbase.spark.example" : [], "org.apache.hadoop.hbase.spark" : 
[{"class" : "org\/apache\/hadoop\/hbase\/spark\/AndLogicExpression.html", 
"name" : "org.apache.hadoop.hbase.spark.AndLogicExpression"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/AvroException.html", "name" : "org.apa
 che.hadoop.hbase.spark.AvroException"}, {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/AvroSerdes$.html", "name" : 
"org.apache.hadoop.hbase.spark.AvroSerdes"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/BulkLoadPartitioner.html", "name" : 
"org.apache.hadoop.hbase.spark.BulkLoadPartitioner"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/ByteArrayComparable.html", "name" : 
"org.apache.hadoop.hbase.spark.ByteArrayComparable"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/ByteArrayWrapper.html", "name" : 
"org.apache.hadoop.hbase.spark.ByteArrayWrapper"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/ColumnFamilyQualifierMapKeyWrapper.html", 
"name" : "org.apache.hadoop.hbase.spark.ColumnFamilyQualifierMapKeyWrapper"}, 
{"class" : "org\/apache\/hadoop\/hbase\/spark\/ColumnFilter.html", "name" : 
"org.apache.hadoop.hbase.spark.ColumnFilter"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/ColumnFilterCollection.html", "name" : 
"org.apache.hadoop.hbase.spark.ColumnFilterCollection"}
 , {"trait" : "org\/apache\/hadoop\/hbase\/spark\/CompareTrait.html", "name" : 
"org.apache.hadoop.hbase.spark.CompareTrait"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/DefaultSource.html", "name" : 
"org.apache.hadoop.hbase.spark.DefaultSource"}, {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/DefaultSourceStaticUtils$.html", "name" : 
"org.apache.hadoop.hbase.spark.DefaultSourceStaticUtils"}, {"trait" : 
"org\/apache\/hadoop\/hbase\/spark\/DynamicLogicExpression.html", "name" : 
"org.apache.hadoop.hbase.spark.DynamicLogicExpression"}, {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/DynamicLogicExpressionBuilder$.html", 
"name" : "org.apache.hadoop.hbase.spark.DynamicLogicExpressionBuilder"}, 
{"class" : "org\/apache\/hadoop\/hbase\/spark\/EqualLogicExpression.html", 
"name" : "org.apache.hadoop.hbase.spark.EqualLogicExpression"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/ExecutionRuleForUnitTesting.html", "name" : 
"org.apache.hadoop.hbase.spark.ExecutionRuleForUnitTesting"}, {"cl
 ass" : "org\/apache\/hadoop\/hbase\/spark\/FamiliesQualifiersValues.html", 
"name" : "org.apache.hadoop.hbase.spark.FamiliesQualifiersValues"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/FamilyHFileWriteOptions.html", "name" : 
"org.apache.hadoop.hbase.spark.FamilyHFileWriteOptions"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/GreaterThanLogicExpression.html", "name" : 
"org.apache.hadoop.hbase.spark.GreaterThanLogicExpression"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/GreaterThanOrEqualLogicExpression.html", 
"name" : "org.apache.hadoop.hbase.spark.GreaterThanOrEqualLogicExpression"}, 
{"case class" : 
"org\/apache\/hadoop\/hbase\/spark\/HBaseConnectionCacheStat.html", "name" : 
"org.apache.hadoop.hbase.spark.HBaseConnectionCacheStat"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/HBaseConnectionKey.html", "name" : 
"org.apache.hadoop.hbase.spark.HBaseConnectionKey"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/HBaseContext.html", "name" : 
"org.apache.hadoop.hbase.spark
 .HBaseContext"}, {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/HBaseDStreamFunctions$.html", "name" : 
"org.apache.hadoop.hbase.spark.HBaseDStreamFunctions"}, {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/HBaseRDDFunctions$.html", "name" : 
"org.apache.hadoop.hbase.spark.HBaseRDDFunctions"}, {"case class" : 
"org\/apache\/hadoop\/hbase\/spark\/HBaseRelation.html", "name" : 
"org.apache.hadoop.hbase.spark.HBaseRelation"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/IsNullLogicExpression.html", "name" : 
"org.apache.hadoop.hbase.spark.IsNullLogicExpression"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/JavaHBaseContext.html", "name" : 
"org.apache.hadoop.hbase.spark.JavaHBaseContext"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/KeyFamilyQualifier.html", "name" : 
"org.apache.hadoop.hbase.spark.KeyFamilyQualifier"}, {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/LatestHBaseContextCache$.html", "name" : 
"org.apache.hadoop.hbase.spark.LatestHBaseContextCache"}, {"class" : "org\/a
 pache\/hadoop\/hbase\/spark\/LessThanLogicExpression.html", "name" : 
"org.apache.hadoop.hbase.spark.LessThanLogicExpression"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/LessThanOrEqualLogicExpression.html", 
"name" : "org.apache.hadoop.hbase.spark.LessThanOrEqualLogicExpression"}, 
{"class" : "org\/apache\/hadoop\/hbase\/spark\/NewHBaseRDD.html", "name" : 
"org.apache.hadoop.hbase.spark.NewHBaseRDD"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/OrLogicExpression.html", "name" : 
"org.apache.hadoop.hbase.spark.OrLogicExpression"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/PassThroughLogicExpression.html", "name" : 
"org.apache.hadoop.hbase.spark.PassThroughLogicExpression"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/RowKeyFilter.html", "name" : 
"org.apache.hadoop.hbase.spark.RowKeyFilter"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/ScanRange.html", "name" : 
"org.apache.hadoop.hbase.spark.ScanRange"}, {"case class" : 
"org\/apache\/hadoop\/hbase\/spark\/SchemaConversi
 onException.html", "name" : 
"org.apache.hadoop.hbase.spark.SchemaConversionException"}, {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/SchemaConverters$.html", "name" : 
"org.apache.hadoop.hbase.spark.SchemaConverters"}, {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/SparkSQLPushDownFilter.html", "name" : 
"org.apache.hadoop.hbase.spark.SparkSQLPushDownFilter"}], 
"org.apache.hadoop.hbase.spark.example.datasources" : [{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/datasources\/AvroHBaseRecord$.html",
 "case class" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/datasources\/AvroHBaseRecord.html",
 "name" : "org.apache.hadoop.hbase.spark.example.datasources.AvroHBaseRecord"}, 
{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/datasources\/AvroSource$.html", 
"name" : "org.apache.hadoop.hbase.spark.example.datasources.AvroSource"}, 
{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/datasources\/DataType$.html", 
"name" : "org.apache.hadoop.hbase.spark.example.datasource
 s.DataType"}, {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/datasources\/HBaseRecord$.html", 
"case class" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/datasources\/HBaseRecord.html", 
"name" : "org.apache.hadoop.hbase.spark.example.datasources.HBaseRecord"}, 
{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/datasources\/HBaseSource$.html", 
"name" : "org.apache.hadoop.hbase.spark.example.datasources.HBaseSource"}, 
{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/datasources\/IntKeyRecord$.html", 
"case class" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/datasources\/IntKeyRecord.html", 
"name" : "org.apache.hadoop.hbase.spark.example.datasources.IntKeyRecord"}, 
{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/datasources\/UserCustomizedSampleException$.html",
 "class" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/datasources\/UserCustomizedSampleException.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.datasources.UserCustomizedSampleExceptio
 n"}], "org.apache.hadoop" : [], "org.apache.spark" : [], "org.apache" : [], 
"org.apache.spark.sql.datasources.hbase" : [{"object" : 
"org\/apache\/spark\/sql\/datasources\/hbase\/DataTypeParserWrapper$.html", 
"name" : "org.apache.spark.sql.datasources.hbase.DataTypeParserWrapper"}, 
{"case class" : "org\/apache\/spark\/sql\/datasources\/hbase\/Field.html", 
"name" : "org.apache.spark.sql.datasources.hbase.Field"}, {"object" : 
"org\/apache\/spark\/sql\/datasources\/hbase\/HBaseTableCatalog$.html", "case 
class" : "org\/apache\/spark\/sql\/datasources\/hbase\/HBaseTableCatalog.html", 
"name" : "org.apache.spark.sql.datasources.hbase.HBaseTableCatalog"}, {"case 
class" : "org\/apache\/spark\/sql\/datasources\/hbase\/RowKey.html", "name" : 
"org.apache.spark.sql.datasources.hbase.RowKey"}, {"case class" : 
"org\/apache\/spark\/sql\/datasources\/hbase\/SchemaMap.html", "name" : 
"org.apache.spark.sql.datasources.hbase.SchemaMap"}, {"case class" : 
"org\/apache\/spark\/sql\/datasources\/hbase\/Sche
 maQualifierDefinition.html", "name" : 
"org.apache.spark.sql.datasources.hbase.SchemaQualifierDefinition"}, {"object" 
: "org\/apache\/spark\/sql\/datasources\/hbase\/Utils$.html", "name" : 
"org.apache.spark.sql.datasources.hbase.Utils"}], 
"org.apache.spark.sql.datasources" : [], 
"org.apache.hadoop.hbase.spark.example.hbasecontext" : [{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/hbasecontext\/HBaseBulkDeleteExample$.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.hbasecontext.HBaseBulkDeleteExample"}, 
{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/hbasecontext\/HBaseBulkGetExample$.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.hbasecontext.HBaseBulkGetExample"}, 
{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/hbasecontext\/HBaseBulkPutExample$.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.hbasecontext.HBaseBulkPutExample"}, 
{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/hbasecontext\/HBaseBulkPutExampleFromFile$.html",
 
 "name" : 
"org.apache.hadoop.hbase.spark.example.hbasecontext.HBaseBulkPutExampleFromFile"},
 {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/hbasecontext\/HBaseBulkPutTimestampExample$.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.hbasecontext.HBaseBulkPutTimestampExample"},
 {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/hbasecontext\/HBaseDistributedScanExample$.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.hbasecontext.HBaseDistributedScanExample"},
 {"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/hbasecontext\/HBaseStreamingBulkPutExample$.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.hbasecontext.HBaseStreamingBulkPutExample"},
 {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/hbasecontext\/JavaHBaseBulkDeleteExample.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.hbasecontext.JavaHBaseBulkDeleteExample"},
 {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/hbasecontext\/JavaHBaseBulkGetExample.html",
 "name" :
  
"org.apache.hadoop.hbase.spark.example.hbasecontext.JavaHBaseBulkGetExample"}, 
{"class" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/hbasecontext\/JavaHBaseBulkPutExample.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.hbasecontext.JavaHBaseBulkPutExample"}, 
{"class" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/hbasecontext\/JavaHBaseDistributedScan.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.hbasecontext.JavaHBaseDistributedScan"}, 
{"class" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/hbasecontext\/JavaHBaseMapGetPutExample.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.hbasecontext.JavaHBaseMapGetPutExample"},
 {"class" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/hbasecontext\/JavaHBaseStreamingBulkPutExample.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.hbasecontext.JavaHBaseStreamingBulkPutExample"}],
 "org.apache.hadoop.hbase.spark.example.rdd" : [{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/rdd\/HBaseBulkDeleteExample$.htm
 l", "name" : 
"org.apache.hadoop.hbase.spark.example.rdd.HBaseBulkDeleteExample"}, {"object" 
: "org\/apache\/hadoop\/hbase\/spark\/example\/rdd\/HBaseBulkGetExample$.html", 
"name" : "org.apache.hadoop.hbase.spark.example.rdd.HBaseBulkGetExample"}, 
{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/rdd\/HBaseBulkPutExample$.html", 
"name" : "org.apache.hadoop.hbase.spark.example.rdd.HBaseBulkPutExample"}, 
{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/rdd\/HBaseForeachPartitionExample$.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.rdd.HBaseForeachPartitionExample"}, 
{"object" : 
"org\/apache\/hadoop\/hbase\/spark\/example\/rdd\/HBaseMapPartitionExample$.html",
 "name" : 
"org.apache.hadoop.hbase.spark.example.rdd.HBaseMapPartitionExample"}], 
"org.apache.hadoop.hbase" : [], "org.apache.spark.sql" : [], "org" : [], 
"org.apache.hadoop.hbase.spark.protobuf" : [], 
"org.apache.hadoop.hbase.spark.protobuf.generated" : [{"class" : 
"org\/apache\/hadoop\/hbase\/spark\/proto
 buf\/generated\/SparkFilterProtos.html", "name" : 
"org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos"}]};
\ No newline at end of file
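
For context, the hunk above is the regenerated scaladoc search index (index.js): a single JSON object keyed by package name, where each value is an array of entries, and each entry pairs a kind key ("class", "object", "trait", or "case class") holding the escaped path to the generated template page with the fully qualified "name" of the type; packages with no documented templates map to empty arrays. A minimal Scala sketch of that shape, built only from entries quoted in the hunk above and purely illustrative (this is not how the scaladoc tooling itself produces or consumes the file):

  // Illustrative only: models a few entries from the regenerated index.js
  // hunk above as plain Scala data, mirroring its package -> entries layout.
  object ScaladocIndexShape {
    final case class Entry(kind: String, templatePath: String, name: String)

    val index: Map[String, Seq[Entry]] = Map(
      "org.apache.hadoop.hbase.spark" -> Seq(
        Entry("case class",
              "org/apache/hadoop/hbase/spark/HBaseConnectionCacheStat.html",
              "org.apache.hadoop.hbase.spark.HBaseConnectionCacheStat"),
        Entry("object",
              "org/apache/hadoop/hbase/spark/AvroSerdes$.html",
              "org.apache.hadoop.hbase.spark.AvroSerdes")
      ),
      // packages without documented templates appear with empty arrays
      "org.apache.hadoop.hbase.spark.protobuf" -> Seq.empty
    )
  }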

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f17356a7/hbase-spark/scaladocs/index/index-h.html
----------------------------------------------------------------------
diff --git a/hbase-spark/scaladocs/index/index-h.html b/hbase-spark/scaladocs/index/index-h.html
index 4d0788d..da2e536 100644
--- a/hbase-spark/scaladocs/index/index-h.html
+++ b/hbase-spark/scaladocs/index/index-h.html
@@ -30,6 +30,9 @@
       <div class="name">HBaseBulkPutTimestampExample</div>
       <div class="occurrences"><a 
href="../org/apache/hadoop/hbase/spark/example/hbasecontext/package.html" 
class="extype" 
name="org.apache.hadoop.hbase.spark.example.hbasecontext">hbasecontext</a> 
</div>
     </div><div class="entry">
+      <div class="name">HBaseConnectionCacheStat</div>
+      <div class="occurrences"><a 
href="../org/apache/hadoop/hbase/spark/package.html" class="extype" 
name="org.apache.hadoop.hbase.spark">spark</a> </div>
+    </div><div class="entry">
       <div class="name">HBaseConnectionKey</div>
       <div class="occurrences"><a 
href="../org/apache/hadoop/hbase/spark/package.html" class="extype" 
name="org.apache.hadoop.hbase.spark">spark</a> </div>
     </div><div class="entry">

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f17356a7/hbase-spark/scaladocs/index/index-n.html
----------------------------------------------------------------------
diff --git a/hbase-spark/scaladocs/index/index-n.html b/hbase-spark/scaladocs/index/index-n.html
index f4f7765..57a13d2 100644
--- a/hbase-spark/scaladocs/index/index-n.html
+++ b/hbase-spark/scaladocs/index/index-n.html
@@ -36,7 +36,16 @@
       <div class="name">nullable</div>
       <div class="occurrences"><a 
href="../org/apache/hadoop/hbase/spark/SchemaConverters$$SchemaType.html" 
class="extype" 
name="org.apache.hadoop.hbase.spark.SchemaConverters.SchemaType">SchemaType</a> 
</div>
     </div><div class="entry">
+      <div class="name">numActiveConnections</div>
+      <div class="occurrences"><a 
href="../org/apache/hadoop/hbase/spark/HBaseConnectionCacheStat.html" 
class="extype" 
name="org.apache.hadoop.hbase.spark.HBaseConnectionCacheStat">HBaseConnectionCacheStat</a>
 </div>
+    </div><div class="entry">
+      <div class="name">numActualConnectionsCreated</div>
+      <div class="occurrences"><a 
href="../org/apache/hadoop/hbase/spark/HBaseConnectionCacheStat.html" 
class="extype" 
name="org.apache.hadoop.hbase.spark.HBaseConnectionCacheStat">HBaseConnectionCacheStat</a>
 </div>
+    </div><div class="entry">
       <div class="name">numPartitions</div>
       <div class="occurrences"><a 
href="../org/apache/hadoop/hbase/spark/BulkLoadPartitioner.html" class="extype" 
name="org.apache.hadoop.hbase.spark.BulkLoadPartitioner">BulkLoadPartitioner</a>
 </div>
+    </div><div class="entry">
+      <div class="name">numTotalRequests</div>
+      <div class="occurrences"><a 
href="../org/apache/hadoop/hbase/spark/HBaseConnectionCacheStat.html" 
class="extype" 
name="org.apache.hadoop.hbase.spark.HBaseConnectionCacheStat">HBaseConnectionCacheStat</a>
 </div>
     </div></body>
       </html>
\ No newline at end of file
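
The index-h.html and index-n.html additions above come from the newly indexed HBaseConnectionCacheStat in org.apache.hadoop.hbase.spark, which the regenerated index.js lists as a case class and whose members numTotalRequests, numActualConnectionsCreated, and numActiveConnections now appear in the N index. As a rough sketch only, a definition consistent with those index entries could look like the following; the field types and ordering are assumptions, not taken from the actual hbase-spark source:

  package org.apache.hadoop.hbase.spark

  // Hypothetical sketch inferred solely from the regenerated scaladoc index
  // entries above; the real definition in the hbase-spark module may differ.
  case class HBaseConnectionCacheStat(
    numTotalRequests: Long,            // connection requests seen by the cache (type assumed)
    numActualConnectionsCreated: Long, // requests that had to open a new connection (type assumed)
    numActiveConnections: Long         // connections currently held open (type assumed)
  )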
