[ https://issues.apache.org/jira/browse/HIVE-25637?focusedWorklogId=707487&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-707487 ]

ASF GitHub Bot logged work on HIVE-25637:
-----------------------------------------

                Author: ASF GitHub Bot
            Created on: 12/Jan/22 13:38
            Start Date: 12/Jan/22 13:38
    Worklog Time Spent: 10m 
      Work Description: maheshk114 commented on a change in pull request #2944:
URL: https://github.com/apache/hive/pull/2944#discussion_r783073682



##########
File path: ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
##########
@@ -311,10 +325,18 @@ private void getKafkaCredentials(MapWork work, DAG dag, JobConf conf) {
         break;
       }
     }
+
+    for (TableDesc tableDesc : fileSinkTableDescs) {
+      kafkaBrokers = (String) tableDesc.getProperties().get("kafka.bootstrap.servers"); //FIXME: KafkaTableProperties
+      if (kafkaBrokers != null && !kafkaBrokers.isEmpty()) {
Review comment:
       The common code can be reused here instead of adding another copy.
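
To make the suggestion concrete, here is a minimal sketch of what a shared helper might look like (the names addKafkaCredentials and obtainKafkaDelegationToken are placeholders for illustration, not names from the actual code):

{code}
// Illustrative sketch only, not the actual patch. Assumes it lives in DagUtils next to
// getKafkaCredentials(), with java.util.Collection, org.apache.hadoop.hive.ql.plan.TableDesc,
// org.apache.tez.dag.api.DAG and org.apache.hadoop.mapred.JobConf already imported.
private void addKafkaCredentials(Collection<TableDesc> tableDescs, DAG dag, JobConf conf) {
  for (TableDesc tableDesc : tableDescs) {
    // Same property the duplicated blocks read today; ideally resolved through the
    // KafkaTableProperties constant rather than a string literal.
    String kafkaBrokers = tableDesc.getProperties().getProperty("kafka.bootstrap.servers");
    if (kafkaBrokers != null && !kafkaBrokers.isEmpty()) {
      // Stand-in for whatever the existing common code does with the broker list
      // when adding Kafka credentials to the DAG.
      obtainKafkaDelegationToken(kafkaBrokers, dag, conf);
      break;
    }
  }
}
{code}

The caller would then invoke it once for the scan-side table descriptors and once for fileSinkTableDescs, so the broker lookup and the null/empty check live in a single place.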




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: gitbox-unsubscr...@hive.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


Issue Time Tracking
-------------------

    Worklog Id:     (was: 707487)
    Time Spent: 1.5h  (was: 1h 20m)

> Hive on Tez: inserting data failing into the non native hive external table managed by kafka storage handler
> -------------------------------------------------------------------------------------------------------------
>
>                 Key: HIVE-25637
>                 URL: https://issues.apache.org/jira/browse/HIVE-25637
>             Project: Hive
>          Issue Type: Improvement
>            Reporter: László Bodor
>            Assignee: László Bodor
>            Priority: Major
>              Labels: pull-request-available
>          Time Spent: 1.5h
>  Remaining Estimate: 0h
>
> This is the follow-up to HIVE-23408; the repro is below:
> {code}
> CREATE EXTERNAL TABLE `kafka_table`(             
>   `timestamp` timestamp COMMENT 'from deserializer',
>   `page` string COMMENT 'from deserializer',     
>   `newpage` boolean COMMENT 'from deserializer', 
>   `added` int COMMENT 'from deserializer',       
>   `deleted` bigint COMMENT 'from deserializer',  
>   `delta` double COMMENT 'from deserializer')
> ROW FORMAT SERDE                                 
>   'org.apache.hadoop.hive.kafka.KafkaSerDe'      
> STORED BY                                        
>   'org.apache.hadoop.hive.kafka.KafkaStorageHandler'
> WITH SERDEPROPERTIES (                           
>   'serialization.format'='1')                    
> LOCATION                                         
>   'hdfs://lbodorkafkaunsec-2.lbodorkafkaunsec.root.hwx.site:8020/warehouse/tablespace/external/hive/kafka_table'
> TBLPROPERTIES (                                  
>   'bucketing_version'='2',                       
>   'hive.kafka.max.retries'='6',                  
>   'hive.kafka.metadata.poll.timeout.ms'='30000', 
>   'hive.kafka.optimistic.commit'='false',        
>   'hive.kafka.poll.timeout.ms'='5000',           
>   'kafka.bootstrap.servers'='lbodorkafkaunsec-1.lbodorkafkaunsec.root.hwx.site:9092,lbodorkafkaunsec-2.lbodorkafkaunsec.root.hwx.site:9092,lbodorkafkaunsec-3.lbodorkafkaunsec.root.hwx.site:9092',
>   'kafka.serde.class'='org.apache.hadoop.hive.serde2.JsonSerDe',
>   'kafka.topic'='hit-topic-1',                   
>   'kafka.write.semantic'='AT_LEAST_ONCE');
> SELECT COUNT(*) FROM kafka_table WHERE `__timestamp` > 1000 * to_unix_timestamp(CURRENT_TIMESTAMP - interval '10' MINUTES); # works due to HIVE-23408
> insert into kafka_table values(NULL, 'comment', 0, 1, 2, 3.0, NULL, NULL, NULL, NULL); # fails
> {code}
> The exception I get:
> {code}
> Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: org.apache.hadoop.hive.ql.metadata.HiveException: org.apache.kafkaesque.common.KafkaException: Failed to construct kafka producer
>       at org.apache.hadoop.hive.ql.exec.FileSinkOperator.createBucketFiles(FileSinkOperator.java:829)
>       at org.apache.hadoop.hive.ql.exec.FileSinkOperator.process(FileSinkOperator.java:1004)
>       at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:937)
>       at org.apache.hadoop.hive.ql.exec.SelectOperator.process(SelectOperator.java:95)
>       at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:937)
>       at org.apache.hadoop.hive.ql.exec.UDTFOperator.forwardUDTFOutput(UDTFOperator.java:133)
>       at org.apache.hadoop.hive.ql.udf.generic.UDTFCollector.collect(UDTFCollector.java:45)
>       at org.apache.hadoop.hive.ql.udf.generic.GenericUDTF.forward(GenericUDTF.java:110)
>       at org.apache.hadoop.hive.ql.udf.generic.GenericUDTFInline.process(GenericUDTFInline.java:64)
>       at org.apache.hadoop.hive.ql.exec.UDTFOperator.process(UDTFOperator.java:116)
>       at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:937)
>       at org.apache.hadoop.hive.ql.exec.SelectOperator.process(SelectOperator.java:95)
>       at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:937)
>       at org.apache.hadoop.hive.ql.exec.TableScanOperator.process(TableScanOperator.java:128)
>       at org.apache.hadoop.hive.ql.exec.MapOperator$MapOpCtx.forward(MapOperator.java:152)
>       at org.apache.hadoop.hive.ql.exec.MapOperator.process(MapOperator.java:552)
>       ... 20 more
> Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: org.apache.kafkaesque.common.KafkaException: Failed to construct kafka producer
>       at org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.getHiveRecordWriter(HiveFileFormatUtils.java:282)
>       at org.apache.hadoop.hive.ql.exec.FileSinkOperator.createBucketForFileIdx(FileSinkOperator.java:872)
>       at org.apache.hadoop.hive.ql.exec.FileSinkOperator.createBucketFiles(FileSinkOperator.java:823)
>       ... 35 more
> Caused by: org.apache.kafkaesque.common.KafkaException: Failed to construct kafka producer
>       at org.apache.kafkaesque.clients.producer.KafkaProducer.<init>(KafkaProducer.java:432)
>       at org.apache.kafkaesque.clients.producer.KafkaProducer.<init>(KafkaProducer.java:313)
>       at org.apache.hadoop.hive.kafka.SimpleKafkaWriter.<init>(SimpleKafkaWriter.java:80)
>       at org.apache.hadoop.hive.kafka.KafkaOutputFormat.getHiveRecordWriter(KafkaOutputFormat.java:60)
>       at org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.getRecordWriter(HiveFileFormatUtils.java:294)
>       at org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.getHiveRecordWriter(HiveFileFormatUtils.java:279)
>       ... 37 more
> Caused by: org.apache.kafkaesque.common.KafkaException: javax.security.auth.login.LoginException: Could not login: the client is being asked for a password, but the Kafka client code does not currently support obtaining a password from the user. not available to garner  authentication information from the user
>       at org.apache.kafkaesque.common.network.SaslChannelBuilder.configure(SaslChannelBuilder.java:158)
>       at org.apache.kafkaesque.common.network.ChannelBuilders.create(ChannelBuilders.java:146)
>       at org.apache.kafkaesque.common.network.ChannelBuilders.clientChannelBuilder(ChannelBuilders.java:67)
>       at org.apache.kafkaesque.clients.ClientUtils.createChannelBuilder(ClientUtils.java:99)
>       at org.apache.kafkaesque.clients.producer.KafkaProducer.newSender(KafkaProducer.java:450)
>       at org.apache.kafkaesque.clients.producer.KafkaProducer.<init>(KafkaProducer.java:421)
>       ... 42 more
> Caused by: javax.security.auth.login.LoginException: Could not login: the client is being asked for a password, but the Kafka client code does not currently support obtaining a password from the user. not available to garner  authentication information from the user
>       at com.sun.security.auth.module.Krb5LoginModule.promptForPass(Krb5LoginModule.java:944)
>       at com.sun.security.auth.module.Krb5LoginModule.attemptAuthentication(Krb5LoginModule.java:764)
>       at com.sun.security.auth.module.Krb5LoginModule.login(Krb5LoginModule.java:617)
>       at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>       at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>       at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>       at java.lang.reflect.Method.invoke(Method.java:498)
>       at javax.security.auth.login.LoginContext.invoke(LoginContext.java:755)
>       at javax.security.auth.login.LoginContext.access$000(LoginContext.java:195)
>       at javax.security.auth.login.LoginContext$4.run(LoginContext.java:682)
>       at javax.security.auth.login.LoginContext$4.run(LoginContext.java:680)
>       at java.security.AccessController.doPrivileged(Native Method)
>       at javax.security.auth.login.LoginContext.invokePriv(LoginContext.java:680)
>       at javax.security.auth.login.LoginContext.login(LoginContext.java:587)
>       at org.apache.kafkaesque.common.security.authenticator.AbstractLogin.login(AbstractLogin.java:60)
>       at org.apache.kafkaesque.common.security.kerberos.KerberosLogin.login(KerberosLogin.java:103)
>       at org.apache.kafkaesque.common.security.authenticator.LoginManager.<init>(LoginManager.java:62)
>       at org.apache.kafkaesque.common.security.authenticator.LoginManager.acquireLoginManager(LoginManager.java:105)
>       at org.apache.kafkaesque.common.network.SaslChannelBuilder.configure(SaslChannelBuilder.java:147)
>       ... 47 more
> {code}



--
This message was sent by Atlassian Jira
(v8.20.1#820001)
