morningman commented on PR #19841:
URL: https://github.com/apache/doris/pull/19841#issuecomment-1555941889
Please apply this patch to pass the check. Save it to `1.diff` and run: `patch -p1 < 1.diff`
```
diff --git a/be/src/io/hdfs_builder.cpp b/be/src/io/hdfs_builder.cpp
index c0175af2d2..be1281441a 100644
--- a/be/src/io/hdfs_builder.cpp
+++ b/be/src/io/hdfs_builder.cpp
@@ -49,12 +49,12 @@ Status HDFSCommonBuilder::run_kinit() {
         return Status::InvalidArgument("Invalid hdfs_kerberos_principal or hdfs_kerberos_keytab");
     }
     std::string ticket_path = TICKET_CACHE_PATH + generate_uuid_string();
-    const char *krb_home = getenv("KRB_HOME");
+    const char* krb_home = getenv("KRB_HOME");
     std::string krb_home_str(krb_home ? krb_home : "");
     fmt::memory_buffer kinit_command;
     if (krb_home_str.empty()) {
-        fmt::format_to(kinit_command, "kinit -c {} -R -t {} -k {}", ticket_path, hdfs_kerberos_keytab,
-                       hdfs_kerberos_principal);
+        fmt::format_to(kinit_command, "kinit -c {} -R -t {} -k {}", ticket_path,
+                       hdfs_kerberos_keytab, hdfs_kerberos_principal);
     } else {
         // Assign kerberos home in env, get kinit in kerberos home
         fmt::format_to(kinit_command, krb_home_str + "/bin/kinit -c {} -R -t {} -k {}", ticket_path,
@@ -67,10 +67,11 @@ Status HDFSCommonBuilder::run_kinit() {
     if (!rc) {
         return Status::InternalError("Kinit failed, errMsg: " + msg);
     }
-    #ifdef USE_LIBHDFS3
-    hdfsBuilderSetPrincipal(hdfs_builder, hdfs_kerberos_principal.c_str());
-    #endif
-    hdfsBuilderConfSetStr(hdfs_builder, "hadoop.security.kerberos.ticket.cache.path", ticket_path.c_str());
+#ifdef USE_LIBHDFS3
+    hdfsBuilderSetPrincipal(hdfs_builder, hdfs_kerberos_principal.c_str());
+#endif
+    hdfsBuilderConfSetStr(hdfs_builder, "hadoop.security.kerberos.ticket.cache.path",
+                          ticket_path.c_str());
 
     return Status::OK();
 }
@@ -116,22 +117,28 @@ Status createHDFSBuilder(const THdfsParams& hdfsParams, HDFSCommonBuilder* build
         hdfsBuilderSetUserName(builder->get(), hdfsParams.hdfs_kerberos_principal.c_str());
     } else if (hdfsParams.__isset.user) {
         hdfsBuilderSetUserName(builder->get(), hdfsParams.user.c_str());
+#ifdef USE_HADOOP_HDFS
         hdfsBuilderSetKerb5Conf(builder->get(), nullptr);
         hdfsBuilderSetKeyTabFile(builder->get(), nullptr);
+#endif
     }
     if (hdfsParams.__isset.hdfs_kerberos_keytab) {
         builder->need_kinit = true;
         builder->hdfs_kerberos_keytab = hdfsParams.hdfs_kerberos_keytab;
+#ifdef USE_HADOOP_HDFS
         hdfsBuilderSetKeyTabFile(builder->get(), hdfsParams.hdfs_kerberos_keytab.c_str());
+#endif
     }
     // set other conf
     if (hdfsParams.__isset.hdfs_conf) {
         for (const THdfsConf& conf : hdfsParams.hdfs_conf) {
             hdfsBuilderConfSetStr(builder->get(), conf.key.c_str(), conf.value.c_str());
+#ifdef USE_HADOOP_HDFS
             // Set krb5.conf, we should define java.security.krb5.conf in catalog properties
             if (strcmp(conf.key.c_str(), "java.security.krb5.conf") == 0) {
                 hdfsBuilderSetKerb5Conf(builder->get(), conf.value.c_str());
             }
+#endif
         }
     }
 
```
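
For context on the lines being rewrapped above: `run_kinit()` formats the kinit command into an `fmt::memory_buffer` before handing it off for execution. Below is a minimal standalone sketch of that pattern, not the actual Doris code; the paths and principal are made-up placeholders, and it writes through `std::back_inserter` because newer {fmt} releases dropped the direct `format_to(buffer, ...)` overload the patch uses.

```
#include <fmt/format.h>

#include <iostream>
#include <iterator>
#include <string>

int main() {
    // Placeholder values standing in for HDFSCommonBuilder's members.
    std::string ticket_path = "/tmp/krb5cc_example";
    std::string keytab = "/etc/doris/example.keytab";
    std::string principal = "doris/host01@EXAMPLE.COM";

    // Same pattern as run_kinit(): format the command into a memory_buffer,
    // then materialize it as a std::string.
    fmt::memory_buffer kinit_command;
    fmt::format_to(std::back_inserter(kinit_command), "kinit -c {} -R -t {} -k {}",
                   ticket_path, keytab, principal);
    std::string cmd = fmt::to_string(kinit_command);

    std::cout << cmd << '\n';  // the real code passes this string to a shell
    return 0;
}
```

With a system {fmt} install this should build with something like `g++ -std=c++17 example.cpp -lfmt`.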
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]