gpt4 book ai didi

org.apache.hadoop.yarn.conf.YarnConfiguration.writeXml()方法的使用及代码示例

转载 作者:知者 更新时间:2024-03-18 01:54:40 25 4
gpt4 key购买 nike

本文整理了Java中org.apache.hadoop.yarn.conf.YarnConfiguration.writeXml()方法的一些代码示例,展示了YarnConfiguration.writeXml()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。YarnConfiguration.writeXml()方法的具体详情如下:
包路径:org.apache.hadoop.yarn.conf.YarnConfiguration
类名称:YarnConfiguration
方法名:writeXml

YarnConfiguration.writeXml介绍

暂无

代码示例

代码示例来源:origin: apache/incubator-gobblin

// Serialize the cluster's YarnConfiguration as Hadoop-style XML onto the
// output stream `os` (stream creation/closing happens outside this snippet).
clusterConf.writeXml(os);

代码示例来源:origin: uber/AthenaX

/**
 * Boots the mini YARN cluster, persists its effective configuration as
 * {@code yarn-site.xml} in the working directory, and assembles the
 * {@link YarnClusterConfiguration} used by the test.
 *
 * @return cluster configuration pointing at the started mini cluster
 * @throws IOException if writing yarn-site.xml fails
 * @throws URISyntaxException if the log4j.properties classpath URL is malformed
 */
private YarnClusterConfiguration prepareYarnCluster() throws IOException, URISyntaxException {
  yarnCluster.init(yarnConf);
  yarnCluster.start();
  // Propagate the RM address the mini cluster actually bound to (it picks
  // ephemeral ports) back into the configuration handed to clients.
  yarnConf.set(RM_ADDRESS, yarnCluster.getResourceManager().getConfig().get(RM_ADDRESS));

  File yarnSite = new File(workDir, "yarn-site.xml");
  // Write the XML explicitly as UTF-8. The original FileWriter used the
  // platform default charset, which can corrupt yarn-site.xml on
  // non-UTF-8 systems. Fully-qualified names keep the snippet import-free.
  try (PrintWriter pw = new PrintWriter(new java.io.OutputStreamWriter(
      new java.io.FileOutputStream(yarnSite), java.nio.charset.StandardCharsets.UTF_8))) {
    yarnConf.writeXml(pw);
  }

  Path flinkUberJar = new Path(new File(workDir, "flink.jar").toURI());
  Path flinkConfYaml = new Path(new File(workDir, "flink-conf.yaml").toURI());
  // getResource() is non-null here because log4j.properties ships on the
  // test classpath; the annotation silences the IDE's null warning.
  @SuppressWarnings("ConstantConditions")
  Path log4jPath = new Path(Thread.currentThread().getContextClassLoader().getResource("log4j.properties").toURI());
  Set<Path> resourcesToLocalize = new HashSet<>(Arrays.asList(flinkUberJar, flinkConfYaml, log4jPath));
  String home = workDir.toURI().toString();
  return new YarnClusterConfiguration(
    yarnConf,
    home,
    flinkUberJar,
    resourcesToLocalize,
    systemJars(yarnSite));
}

代码示例来源:origin: org.apache.hadoop/hadoop-yarn-api

/**
 * Dumps the effective YARN configuration (built-in defaults plus any loaded
 * resource files) as XML to standard output.
 */
public static void main(String[] args) throws Exception {
  YarnConfiguration effectiveConf = new YarnConfiguration(new Configuration());
  effectiveConf.writeXml(System.out);
}
}

代码示例来源:origin: io.hops/hadoop-yarn-api

/** Prints the fully-resolved YARN configuration as XML on stdout. */
public static void main(String[] args) throws Exception {
  Configuration yarnDefaults = new YarnConfiguration(new Configuration());
  yarnDefaults.writeXml(System.out);
}

代码示例来源:origin: yahoo/storm-yarn

// Create yarn-site.xml on the target Hadoop FileSystem and serialize the
// YARN configuration into it. `fs`, `yarn_site_xml`, `out`, `writer` and
// `yarnConf` are declared outside this snippet.
out = fs.create(yarn_site_xml);
writer = new OutputStreamWriter(out);
yarnConf.writeXml(writer);
// NOTE(review): if writeXml throws, these close() calls are skipped and the
// stream leaks — the enclosing method presumably cleans up; confirm upstream.
writer.close();
out.close();

代码示例来源:origin: ch.cern.hadoop/hadoop-mapreduce-client-jobclient

/**
 * Materializes the on-disk artifacts that createApplicationSubmissionContext
 * expects (job.xml with the test configuration, plus empty split and
 * split-metainfo files) in the test working directory, then builds the
 * submission context through the given runner.
 *
 * @param yarnRunner runner under test
 * @param jobConf job configuration to submit
 * @return the constructed ApplicationSubmissionContext
 * @throws IOException if any of the staging files cannot be written
 */
private ApplicationSubmissionContext buildSubmitContext(
   YARNRunner yarnRunner, JobConf jobConf) throws IOException {
  File jobxml = new File(testWorkDir, MRJobConfig.JOB_CONF_FILE);
  // try-with-resources closes job.xml even if writeXml throws; the original
  // leaked the FileOutputStream on failure.
  try (OutputStream out = new FileOutputStream(jobxml)) {
   conf.writeXml(out);
  }

  // The split and split-metainfo files only need to exist; content is unused.
  File jobsplit = new File(testWorkDir, MRJobConfig.JOB_SPLIT);
  new FileOutputStream(jobsplit).close();

  File jobsplitmetainfo = new File(testWorkDir,
    MRJobConfig.JOB_SPLIT_METAINFO);
  new FileOutputStream(jobsplitmetainfo).close();

  return yarnRunner.createApplicationSubmissionContext(jobConf,
    testWorkDir.toString(), new Credentials());
 }
}

代码示例来源:origin: ch.cern.hadoop/hadoop-mapreduce-client-jobclient

/**
 * Verifies that submitJob surfaces the ResourceManager diagnostics when the
 * application ends up in the FAILED state: the RM mock accepts the submission
 * but reports FAILED with {@code failString} as diagnostics.
 */
@Test(timeout=20000)
public void testJobSubmissionFailure() throws Exception {
 when(resourceMgrDelegate.submitApplication(any(ApplicationSubmissionContext.class))).
 thenReturn(appId);
 ApplicationReport report = mock(ApplicationReport.class);
 when(report.getApplicationId()).thenReturn(appId);
 when(report.getDiagnostics()).thenReturn(failString);
 when(report.getYarnApplicationState()).thenReturn(YarnApplicationState.FAILED);
 when(resourceMgrDelegate.getApplicationReport(appId)).thenReturn(report);
 Credentials credentials = new Credentials();
 File jobxml = new File(testWorkDir, "job.xml");
 // try-with-resources: close job.xml even if writeXml throws (the original
 // leaked the stream on failure).
 try (OutputStream out = new FileOutputStream(jobxml)) {
  conf.writeXml(out);
 }
 try {
  yarnRunner.submitJob(jobId, testWorkDir.getAbsolutePath().toString(), credentials);
  // The original silently passed when no exception was thrown; a FAILED
  // submission that does not raise is itself a test failure.
  assertTrue("submitJob should have thrown IOException with RM diagnostics", false);
 } catch(IOException io) {
  LOG.info("Logging exception:", io);
  assertTrue(io.getLocalizedMessage().contains(failString));
 }
}

25 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com