
java - Trying to run hadoop MapReduce commands and linux commands together in a shell script

Reposted · Author: 可可西里 · Updated: 2023-11-01 16:11:18

I have a shell script like this:

#!/bin/sh
/home/hduser/Downloads/hadoop/bin/stop-all.sh
echo "RUNNING HADOOP PROGRAM"
cd /home/hduser/Downloads/hadoop
sudo rm -R /tmp/*
sudo rm -R /app/*
cd
sudo mkdir -p /app/hadoop/tmp
sudo chown hduser:hadoop /app/hadoop/tmp
sudo chmod 750 /app/hadoop/tmp
hadoop namenode -format
/home/hduser/Downloads/hadoop/bin/start-all.sh
jps
hadoop dfs -mkdir -p ~/Downloads/hadoop/input
hadoop dfs -copyFromLocal /home/hduser/Desktop/iris.arff ~/Downloads/hadoop/input
hadoop jar ~/Desktop/try.jar 2 weka.classifiers.trees.J48 ~/Downloads/hadoop/input ~/Downloads/hadoop/output
/home/hduser/Downloads/hadoop/bin/stop-all.sh

I am calling this script from my Java program like this:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.Arrays;

public class UIinput
{
    public static void main(String[] args) throws IOException
    {
        // Runtime.getRuntime().exec("/home/hduser/Desktop/initial.sh");
        new ProcessBuilder("/home/hduser/Desktop/initial.sh");
        ProcessBuilder pb = new ProcessBuilder("/home/hduser/Desktop/initial.sh");
        Process process = pb.start();
        InputStream is = process.getInputStream();
        InputStreamReader isr = new InputStreamReader(is);
        BufferedReader br = new BufferedReader(isr);

        String line;

        System.out.printf("Output of running %s is:", Arrays.toString(args));

        while ((line = br.readLine()) != null)
        {
            System.out.println(line);
        }
    }
}

My start-all.sh, stop-all.sh and echo commands execute in the script, but the other commands do not. My output looks like:

Output of running [] is:no jobtracker to stop
localhost: no tasktracker to stop
no namenode to stop
localhost: no datanode to stop
localhost: no secondarynamenode to stop
RUNNING HADOOP PROGRAM
starting namenode, logging to /home/hduser/Downloads/hadoop/libexec/../logs/hadoop-hduser-namenode-ubuntu.out
localhost: starting datanode, logging to /home/hduser/Downloads/hadoop/libexec/../logs/hadoop-hduser-datanode-ubuntu.out
localhost: starting secondarynamenode, logging to /home/hduser/Downloads/hadoop/libexec/../logs/hadoop-hduser-secondarynamenode-ubuntu.out
starting jobtracker, logging to /home/hduser/Downloads/hadoop/libexec/../logs/hadoop-hduser-jobtracker-ubuntu.out
localhost: starting tasktracker, logging to /home/hduser/Downloads/hadoop/libexec/../logs/hadoop-hduser-tasktracker-ubuntu.out
stopping jobtracker
localhost: stopping tasktracker
no namenode to stop

Can someone help me? When I run my Java code, I want all of the commands in the shell script to execute. Thanks.

Best Answer

Run your script with the code below and check the system output for details about what is going wrong:

package test;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;

public class CommandLineExecutor {

    public final int EXEC_OK = 0;
    public final int EXEC_FAILED = 1;

    public static void main(String[] args) {

        CommandLineExecutor cmd = new CommandLineExecutor();
        String[] script = new String[]{ "/home/hduser/Desktop/initial.sh" };
        boolean joinToProcess = true; // Main thread waits until the process has finished.
        int result = cmd.execute(script, joinToProcess);
        System.out.println(result == 0 ? "Script successfully run" : "Script failed");

    }

    public int execute(String[] cmd, boolean joinToProcess) {
        Runtime runtime = Runtime.getRuntime();
        Process proc = null;
        try {
            System.out.println("executing cmd: " + concat(cmd));
            proc = runtime.exec(cmd);
            // Read stderr and stdout on separate threads so neither buffer fills up
            // and the error messages become visible.
            StreamProcessor errorStreamProcessor = new StreamProcessor(proc.getErrorStream());
            StreamProcessor outputStreamProcessor = new StreamProcessor(proc.getInputStream());
            errorStreamProcessor.start();
            outputStreamProcessor.start();
        } catch (Exception e) {
            e.printStackTrace(System.out);
            return EXEC_FAILED;
        }
        try {
            int result = EXEC_OK;
            if (joinToProcess)
                result = proc.waitFor();
            return result;
        } catch (InterruptedException e) {
            System.out.println("Error at executing command: " + concat(cmd));
            e.printStackTrace(System.out);
        }
        return EXEC_FAILED;
    }

    public static String concat(String[] array) {
        StringBuffer buffer = new StringBuffer();
        for (int i = 0; i < array.length; i++) {
            if (i > 0)
                buffer.append(' ');
            buffer.append(array[i]);
        }
        return buffer.toString();
    }

    class StreamProcessor extends Thread {

        private InputStream inputStream;

        public StreamProcessor(InputStream is) {
            this.inputStream = is;
        }

        public void run() {
            try {
                InputStreamReader isr = new InputStreamReader(inputStream);
                BufferedReader br = new BufferedReader(isr);
                while (true) {
                    String s = br.readLine();
                    if (s == null)
                        break;
                    System.out.println(s);
                }
            } catch (IOException e) {
                e.printStackTrace(System.out);
            }
        }

    }

}
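
As a possible simplification (not part of the original answer), ProcessBuilder can merge the script's stderr into stdout with redirectErrorStream(true), so a single reader loop also shows the error messages that explain why some commands fail. A minimal sketch, assuming the same script path as in the question:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

public class RunScript {

    public static void main(String[] args) throws IOException, InterruptedException {
        // Same script path as used in the question.
        ProcessBuilder pb = new ProcessBuilder("/home/hduser/Desktop/initial.sh");
        pb.redirectErrorStream(true); // merge stderr into stdout

        Process process = pb.start();

        // One loop now prints both normal output and error messages.
        try (BufferedReader br = new BufferedReader(
                new InputStreamReader(process.getInputStream()))) {
            String line;
            while ((line = br.readLine()) != null) {
                System.out.println(line);
            }
        }

        // Wait for the script to finish and report its exit code.
        int exitCode = process.waitFor();
        System.out.println("Script exited with code " + exitCode);
    }
}

Because everything goes through one stream, any failure messages from the commands inside the script show up directly in the Java console output, which is the same diagnostic idea as the answer above.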

Regarding java - Trying to run hadoop MapReduce commands and linux commands together in a shell script, we found a similar question on Stack Overflow: https://stackoverflow.com/questions/30257951/
