Read/write Hive tables when running Spark2 via Oozie
Problem description
A Spark2 job submitted through an Oozie shell action fails to read or write Hive tables.
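For concreteness, the setup assumed here is roughly the following: the shell action runs a small launcher script that calls spark2-submit, and the submitted application accesses Hive tables through spark.sql. The script name, application class, and jar below are illustrative placeholders, not taken from the original workflow.

#!/bin/bash
# run_spark_job.sh -- hypothetical launcher executed by the Oozie shell action.
# The application class and jar are placeholders.
spark2-submit \
  --master yarn \
  --deploy-mode client \
  --class com.example.HiveJob \
  my-spark-job.jar
# When launched this way, the job cannot resolve the Hive tables it
# reads or writes via spark.sql.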
Troubleshooting
The Spark2 client's spark-env.sh only falls back to the yarn-conf directory under SPARK_CONF_DIR when HADOOP_CONF_DIR is not already set in the environment:
HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-$SPARK_CONF_DIR/yarn-conf}
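A quick check from inside the action's script shows what the Oozie launcher actually exports (a diagnostic sketch; the exact contents of the directory depend on the cluster, but hive-site.xml is typically not among the files written there):

#!/bin/bash
# Inspect the Hadoop conf dir handed to the shell action by the Oozie launcher.
echo "HADOOP_CONF_DIR=$HADOOP_CONF_DIR"
# With oozie.action.shell.setup.hadoop.conf.dir enabled, this points at a
# generated oozie-hadoop-conf-<timestamp> directory rather than the
# cluster client configuration.
ls -l "$HADOOP_CONF_DIR"

The override comes from Oozie's ShellMain (excerpted below from the 4.3.1 release): when CONF_OOZIE_SHELL_SETUP_HADOOP_CONF_DIR (oozie.action.shell.setup.hadoop.conf.dir) is enabled for the action, it writes a Hadoop configuration derived from the action conf into a fresh directory and exports HADOOP_CONF_DIR and YARN_CONF_DIR pointing at it.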
// https://github.com/apache/oozie/blob/release-4.3.1/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/ShellMain.java
public class ShellMain extends LauncherMain {

    private void prepareHadoopConfigs(Configuration actionConf, Map<String, String> envp, File currDir) throws IOException {
        if (actionConf.getBoolean(CONF_OOZIE_SHELL_SETUP_HADOOP_CONF_DIR, false)) {
            String actionXml = envp.get(OOZIE_ACTION_CONF_XML);
            if (actionXml != null) {
                File confDir = new File(currDir, "oozie-hadoop-conf-" + System.currentTimeMillis());
                writeHadoopConfig(actionXml, confDir);
                if (actionConf.getBoolean(CONF_OOZIE_SHELL_SETUP_HADOOP_CONF_DIR_WRITE_LOG4J_PROPERTIES, true)) {
                    System.out.println("Writing " + LOG4J_PROPERTIES + " to " + confDir);
                    writeLoggerProperties(actionConf, confDir);
                }
                System.out.println("Setting " + HADOOP_CONF_DIR + " and " + YARN_CONF_DIR
                        + " to " + confDir.getAbsolutePath());
                envp.put(HADOOP_CONF_DIR, confDir.getAbsolutePath());
                envp.put(YARN_CONF_DIR, confDir.getAbsolutePath());
            }
        }
    }
}

When that flag is enabled for the action, ShellMain therefore replaces HADOOP_CONF_DIR and YARN_CONF_DIR with the generated oozie-hadoop-conf-<timestamp> directory; the fallback in spark-env.sh never applies, and the Hive client configuration (hive-site.xml) is no longer visible to spark2-submit, so the job cannot reach the Hive metastore.

Solution
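One common workaround (a sketch under assumptions: CDH-style client paths such as /etc/hive/conf, and the hypothetical launcher script from above) is to make hive-site.xml visible to spark2-submit again, either by copying it into the generated directory or by restoring the original environment:

#!/bin/bash
# Workaround sketch for the Oozie shell action's launcher script.
# Option 1: copy the Hive client config into the directory generated by
# ShellMain so the metastore settings are on Spark's configuration path.
cp /etc/hive/conf/hive-site.xml "$HADOOP_CONF_DIR/"

# Option 2 (alternative): drop the Oozie-provided values so the fallback
# in spark-env.sh picks $SPARK_CONF_DIR/yarn-conf again.
# unset HADOOP_CONF_DIR YARN_CONF_DIR

spark2-submit \
  --master yarn \
  --deploy-mode client \
  --files /etc/hive/conf/hive-site.xml \
  --class com.example.HiveJob \
  my-spark-job.jar

Alternatively, if the generated configuration directory is not needed at all, setting oozie.action.shell.setup.hadoop.conf.dir to false for the action skips this code path entirely, so ShellMain does not override HADOOP_CONF_DIR and YARN_CONF_DIR.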