gpt4 book ai didi

java - 找不到 Hadoop 作业类

转载 作者:可可西里 更新时间:2023-11-01 16:54:14 26 4
gpt4 key购买 nike

您好,我遇到了麻烦,但我还无法从类似的话题中获得帮助。我正在做一个 hadoop 作业的例子,我现在只是想从 IDE 运行它。这是我的源代码

package org.myorg;

import java.io.IOException;
import java.util.*;

import org.apache.hadoop.io.*;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.*;

import org.apache.hadoop.util.*;


public class WordCount
{

public static class Map extends MapReduceBase implements Mapper<LongWritable, Text, Text, IntWritable> {
private final static IntWritable one = new IntWritable(1);
private Text word = new Text();

public void map(LongWritable key, Text value, OutputCollector<Text, IntWritable> output, Reporter reporter) throws IOException
{
String line = value.toString().toLowerCase().replaceAll("\\p{Punct}|\\d","");
StringTokenizer tokenizer = new StringTokenizer(line);
while (tokenizer.hasMoreTokens())
{

word.set(tokenizer.nextToken());
output.collect(word, one);

}// end while

}// end public void map

}// end public static class Map


public static class Reduce extends MapReduceBase implements Reducer<Text, IntWritable, Text, IntWritable>
{
public void reduce(Text key, Iterator<IntWritable> values, OutputCollector<Text, IntWritable> output, Reporter reporter) throws IOException
{
int sum = 0;
while (values.hasNext())
{
sum+= values.next().get();
}// end while

output.collect(key, new IntWritable(sum));

}// end public void reduce

}// end public static class Reduce

public static void main(String[] args) throws Exception
{
JobConf conf = new JobConf(org.myorg.WordCount.class);
conf.setJobName("wordcount");

conf.setOutputKeyClass(Text.class);
conf.setOutputValueClass(IntWritable.class);

conf.setMapperClass(Map.class);
conf.setCombinerClass(Reduce.class);
conf.setReducerClass(Reduce.class);

conf.setInputFormat(TextInputFormat.class);
conf.setOutputFormat(TextOutputFormat.class);

FileInputFormat.setInputPaths(conf, new Path(args[0]));
FileOutputFormat.setOutputPath(conf, new Path(args[1]));

JobClient.runJob(conf);

}// end main

}//end public class WordCount

这是我遇到的异常。有人有什么想法吗?

Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/log4j/Level
at org.apache.hadoop.mapred.JobConf.<clinit>(JobConf.java:362)
at org.myorg.WordCount.main(WordCount.java:56)
Caused by: java.lang.ClassNotFoundException: org.apache.log4j.Level
at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
at java.lang.ClassLoader.loadClass(ClassLoader.java:358)

最佳答案

很明显，这是因为类路径中缺少 log4j 的 jar 文件（例如 log4j-1.2.x.jar）。请在 IDE 的项目依赖/构建路径中把该 jar 添加到类路径，然后重新运行程序即可。

关于java - 找不到 Hadoop 作业类,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/30769772/

26 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com