[ https://issues.apache.org/jira/browse/PHOENIX-3751?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Ankit Singhal updated PHOENIX-3751:
-----------------------------------
    Fix Version/s: 4.11.0

> Spark 2.1 with Phoenix 4.10 fails to load data as a DataFrame: NullPointerException
> ------------------------------------------------------------------------------------
>
>                 Key: PHOENIX-3751
>                 URL: https://issues.apache.org/jira/browse/PHOENIX-3751
>             Project: Phoenix
>          Issue Type: Bug
>    Affects Versions: 4.10.0
>         Environment: HBase 1.14
> spark 2.10
> phoenix: 4.10
>            Reporter: Nan Xu
>             Fix For: 4.11.0
>
>         Attachments: PHOENIX-3751.patch
>
>
> Create the Phoenix table:
> create table phoenix.quote (
>   sym varchar not null,
>   src varchar,
>   kdbPublishTime time not null,
>   location varchar,
>   bid double,
>   ask double,
>   bsize unsigned_int,
>   asize unsigned_int,
>   srcTime time,
>   layer varchar,
>   expiryTime time,
>   quoteId varchar,
>   recvTime time,
>   distTime time,
>   "TIME" time,
>   CONSTRAINT quote_pk PRIMARY KEY (sym, src, kdbPublishTime))
> COMPRESSION='SNAPPY', DATA_BLOCK_ENCODING='FAST_DIFF', VERSIONS=1000
> Insert data:
> SYM   SRC   KDBPUBLISHTIME  LOCATION  BID     ASK     BSIZE  ASIZE  SRCTIME   LAYER   EXPIRYTIME  QUOTEID  RECVTIME  DISTTIME  TIME
> 6AH7  cme1  03:42:59        N         0.7471  0.7506  20     25     03:42:59  (null)  (null)      (null)   03:42:59  (null)    03:42:59
> 6AH7  cme1  03:42:59        N         0.7474  0.7506  25     25     03:42:59  (null)  (null)      (null)   03:42:59  (null)    03:42:59
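> One way to get rows like these into the table is over the Phoenix JDBC driver (sketch only; the URL jdbc:phoenix:a1.cluster:2181 is assumed from the zkUrl used below, and the columns omitted from the UPSERT stay null, which is what triggers the bug):
> import java.sql.{DriverManager, Time}
>
> val conn = DriverManager.getConnection("jdbc:phoenix:a1.cluster:2181")
> // LAYER, EXPIRYTIME, QUOTEID and DISTTIME are left out, so they remain null.
> val ps = conn.prepareStatement(
>   """UPSERT INTO PHOENIX.QUOTE
>     |  (SYM, SRC, KDBPUBLISHTIME, LOCATION, BID, ASK, BSIZE, ASIZE, SRCTIME, RECVTIME, "TIME")
>     |  VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""".stripMargin)
> ps.setString(1, "6AH7"); ps.setString(2, "cme1"); ps.setTime(3, Time.valueOf("03:42:59"))
> ps.setString(4, "N");    ps.setDouble(5, 0.7471); ps.setDouble(6, 0.7506)
> ps.setInt(7, 20);        ps.setInt(8, 25);        ps.setTime(9, Time.valueOf("03:42:59"))
> ps.setTime(10, Time.valueOf("03:42:59"));         ps.setTime(11, Time.valueOf("03:42:59"))
> ps.executeUpdate()
> conn.commit()
> conn.close()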
> import org.apache.spark.sql.SparkSession
> import org.apache.phoenix.spark._
>
> val spark = SparkSession
>   .builder()
>   .appName("load_avro")
>   .master("local[1]")
>   .config("spark.sql.warehouse.dir", "file:/tmp/spark-warehouse")
>   .getOrCreate()
>
> val df = spark.sqlContext.phoenixTableAsDataFrame(
>   "PHOENIX.QUOTE",
>   Seq("SYM", "SRC", "EXPIRYTIME"),
>   zkUrl = Some("a1.cluster:2181"))
> df.show(100)
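> The same read through the Spark Data Source API (my assumption is that it goes through the same PhoenixRDD conversion, so a null EXPIRYTIME should hit the same NPE; "table" and "zkUrl" are the standard phoenix-spark options):
> val df2 = spark.sqlContext.read
>   .format("org.apache.phoenix.spark")
>   .option("table", "PHOENIX.QUOTE")
>   .option("zkUrl", "a1.cluster:2181")
>   .load()
> df2.select("SYM", "SRC", "EXPIRYTIME").show(100)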
> The problem is in PhoenixRDD, line 140:
> val rowSeq = columns.map { case (name, sqlType) =>
>   val res = pr.resultMap(name)
>   // Special handling for data types
>   if (dateAsTimestamp && (sqlType == 91 || sqlType == 19)) { // 91 is the defined type for Date and 19 for UNSIGNED_DATE
>     new java.sql.Timestamp(res.asInstanceOf[java.sql.Date].getTime)
>   } else if (sqlType == 92 || sqlType == 18) { // 92 is the defined type for Time and 18 for UNSIGNED_TIME
>     new java.sql.Timestamp(res.asInstanceOf[java.sql.Time].getTime)
>   } else {
>     res
>   }
> }
> res can be null when the column value is NULL (EXPIRYTIME in this query), so res.asInstanceOf[java.sql.Time].getTime throws a NullPointerException.
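> A minimal null-safe sketch of that conversion (just to show the direction; not necessarily what the attached patch does):
> val rowSeq = columns.map { case (name, sqlType) =>
>   val res = pr.resultMap(name)
>   if (res == null) {
>     null // pass NULL column values through instead of casting them
>   } else if (dateAsTimestamp && (sqlType == 91 || sqlType == 19)) { // DATE / UNSIGNED_DATE
>     new java.sql.Timestamp(res.asInstanceOf[java.sql.Date].getTime)
>   } else if (sqlType == 92 || sqlType == 18) { // TIME / UNSIGNED_TIME
>     new java.sql.Timestamp(res.asInstanceOf[java.sql.Time].getTime)
>   } else {
>     res
>   }
> }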



--
This message was sent by Atlassian JIRA
(v6.3.15#6346)
