a092cc commented on issue #4283:
URL: https://github.com/apache/dolphinscheduler/issues/4283#issuecomment-1132396853
>
需要把hive-site.xml 加载到conf中，我

/**
 * The SparkRuntimeEnvironment is responsible
 * for creating SparkSession and SparkExecution
 */
@@ -47,14 +52,29 @@ public class SparkRuntimeEnvironment {
}
public void prepare() {
- sparkSession = SparkSession.builder().config(createSparkConf()).getOrCreate();
+ sparkSession = SparkSession.builder().config(createSparkConf())
+ .enableHiveSupport()
+ .getOrCreate();
}
private SparkConf createSparkConf() {
SparkConf conf = new SparkConf();
+
this.config.entrySet()
.forEach(entry -> conf.set(entry.getKey(),
String.valueOf(entry.getValue())));
+
conf.set("spark.sql.crossJoin.enabled","true");
+
+ Configuration cf = new Configuration();
+ cf.addResource("hive-site.xml");
+ cf.addResource("hdfs-site.xml");
+ cf.addResource("core-site.xml");
+ for (Map.Entry<String, String> next : cf) {
+ String key = next.getKey();
+ String value = next.getValue();
+ conf.set(key, value);
+ }
+
return conf;
}
是这么加的
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]