gpt4 book ai didi

java - Java.lang.ClassnotfoundException hadoop

转载 作者:行者123 更新时间:2023-12-02 22:02:05 26 4
gpt4 key购买 nike

我是hadoop和mapreduce编程的新手。我已经编写了一个计算平均值的程序。我在/home/cloudera中准备了一个jar文件。我创建了两个文件夹，分别作为程序的输入和输出目录：/home/cloudera/StockPrediction/input/和/home/cloudera/StockPrediction/output/。
我的代码是:

package mainpackage;


import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.StringTokenizer;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.DoubleWritable;
//import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;

import supportpackage.DoubleArrayWritable;
//import supportpackage.IntArrayWritable;


public class MainFile {
private static final int WEEKDAYS = 5;
private static int avgTokensLength = 0;
/*******************************************************************************************************/
//Job 1: Average Mapper Class
public static class avgMapper extends MapReduceBase implements Mapper<LongWritable, Text, Text, ArrayWritable>{

@Override
public void map(LongWritable key, Text value,OutputCollector<Text, ArrayWritable> output, Reporter reporter) throws IOException {
String vString= value.toString();
StringTokenizer tokens = new StringTokenizer(vString, ",");
String company = tokens.nextToken();

avgTokensLength = tokens.countTokens();
DoubleWritable prices[] = new DoubleWritable[WEEKDAYS];

int index = 0;
while (tokens.hasMoreTokens()) {
prices[index] = new DoubleWritable(Double.parseDouble(tokens.nextToken()));
index++;

if (index == WEEKDAYS) {
output.collect(new Text(company), new DoubleArrayWritable(prices));
index = 0;
}
}

}
}

/*************************************************************************************************/
// Job1: Average Reducer class
public static class avgReduce extends MapReduceBase
implements Reducer<Text, DoubleArrayWritable, Text, DoubleArrayWritable> {

private static int newIndex = 0;

// Average Reduce function
public void reduce(Text key, Iterator<DoubleArrayWritable> values,
OutputCollector<Text, DoubleArrayWritable> output, Reporter reporter) throws IOException {

DoubleWritable prices[] = new DoubleWritable[WEEKDAYS];
DoubleWritable avgPrices[] = new DoubleWritable[avgTokensLength / WEEKDAYS];
DoubleWritable sum = new DoubleWritable(0);

int index = 0, count = 0;
while (values.hasNext()) {
index = 0;
count = 0;
ArrayWritable val = values.next();
for (Writable writable : val.get()) {
prices[index] = (DoubleWritable) writable;
index++;
}

// index = 0;
count = 0;
for (int i = 0; i < prices.length; i++) {
sum = new DoubleWritable(sum.get() + prices[i].get());
count++;


if (count == WEEKDAYS) {
avgPrices[newIndex] = new DoubleWritable(sum.get() / WEEKDAYS);
newIndex++;
sum = new DoubleWritable(0);
count = 0;
}
}
}
output.collect(key, new DoubleArrayWritable(avgPrices));
newIndex = 0;
}
}


public static void main(String args[])throws Exception{

JobConf average = new JobConf(MainFile.class);
/***********************************************************/
//JOB 1:
average.setJobName("Average");
average.setMapOutputKeyClass(Text.class);
average.setMapOutputValueClass(DoubleArrayWritable.class);
average.setMapperClass(avgMapper.class);
average.setReducerClass(avgReduce.class);
average.setInputFormat(TextInputFormat.class);
average.setOutputFormat(TextOutputFormat.class);
//String inputfile=args[0];
//String outputfile=args[1];
FileInputFormat.setInputPaths(average, new Path(args[0]));
FileOutputFormat.setOutputPath(average, new Path(args[1]));

JobClient.runJob(average);

}

}

我创建了一个jar文件,并尝试使用以下命令运行:
hadoop jar /home/cloudera/average.jar /home/cloudera/StockPrediction/input/qualcomm.csv /home/cloudera/StockPrediction/output/qualcomm

我收到此错误。谁能帮我吗。
Exception in thread "main" java.lang.ClassNotFoundException: /home/cloudera/StockPrediction/input/qualcomm/csv
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:270)
at org.apache.hadoop.util.RunJar.run(RunJar.java:214)
at org.apache.hadoop.util.RunJar.main(RunJar.java:136)


最佳答案

您还没有在JAR名称之后指定主类。由于jar的manifest中没有设置Main-Class，hadoop jar会把jar名之后的第一个参数当作主类名来加载，所以它试图加载“/home/cloudera/StockPrediction/input/qualcomm.csv”这个路径作为类，才抛出了ClassNotFoundException。
试试这个命令：hadoop jar /home/cloudera/average.jar mainpackage.MainFile /home/cloudera/StockPrediction/input/qualcomm.csv /home/cloudera/StockPrediction/output/qualcomm

关于java - Java.lang.ClassnotfoundException hadoop,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/60143459/

26 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com