stream.foreachRDD(rdd => {   // "stream" stands in for the tweets DStream
  // write through HBase's TableOutputFormat via the new Hadoop API
  hconf.setClass("mapreduce.job.outputformat.class",
    classOf[TableOutputFormat[String]], classOf[OutputFormat[String, BoxedUnit]])

  rdd.map(record => (new ImmutableBytesWritable, {
    // parse the JSON record into a field -> value map
    // (java.util.HashMap; assumes scala.collection.JavaConversions is imported)
    val maprecord = mapper.readValue(record,
      new TypeReference[HashMap[String, String]]() {})

    // row key: timestamp + "_" + tweet id
    val ts: Long = maprecord.get("ts").toLong
    val tweetID: Long = maprecord.get("id").toLong
    val key = ts + "_" + tweetID

    // one Put per tweet; every field becomes a column in the family
    val put = new Put(Bytes.toBytes(key))
    maprecord.foreach(kv =>
      put.add(Bytes.toBytes(colfamily.value),
        Bytes.toBytes(kv._1), Bytes.toBytes(kv._2)))
    put
  })).saveAsNewAPIHadoopDataset(hconf)
})
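For reference, hconf, mapper and colfamily are created on the driver before the streaming loop. A minimal sketch of that setup, assuming a table named "tweets" and a column family "d" (both placeholders), with ssc as the StreamingContext:

  import org.apache.hadoop.hbase.HBaseConfiguration
  import org.apache.hadoop.hbase.mapreduce.TableOutputFormat
  import com.fasterxml.jackson.databind.ObjectMapper

  // HBase configuration; OUTPUT_TABLE names the target table
  // ("tweets" is a placeholder, not the real table name)
  val hconf = HBaseConfiguration.create()
  hconf.set(TableOutputFormat.OUTPUT_TABLE, "tweets")

  // Jackson mapper used to deserialize each tweet from JSON
  val mapper = new ObjectMapper()

  // column family name broadcast to the executors ("d" is a placeholder)
  val colfamily = ssc.sparkContext.broadcast("d")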
Please help me out in solving this, as it is urgent for me.
--
View this message in context:
http://apache-spark-user-list.1001560.n3.nabble.com/HBase-Spark-Streaming-giving-error-after-restore-tp25090.html
Sent from the Apache Spark User List mailing list archive at Nabble.com.