gpt4 book ai didi

java - 配置对象时链映射器错误

转载 作者:行者123 更新时间:2023-12-02 21:48:37 24 4
gpt4 key购买 nike

我是Hadoop的新手,正在尝试使用链映射器。我使用了http://gandhigeet.blogspot.com/2012/12/as-discussed-in-previous-post-hadoop.html中提供的代码,但是运行jar时出现此错误。

14/04/11 12:06:39 INFO util.NativeCodeLoader: Loaded the native-hadoop library
14/04/11 12:06:39 WARN snappy.LoadSnappy: Snappy native library not loaded
14/04/11 12:06:39 INFO mapred.FileInputFormat: Total input paths to process : 1
14/04/11 12:06:39 INFO mapred.JobClient: Running job: job_201404111150_0004
14/04/11 12:06:40 INFO mapred.JobClient: map 0% reduce 0%
14/04/11 12:07:02 INFO mapred.JobClient: Task Id : attempt_201404111150_0004_m_000000_0, Status : FAILED
java.lang.RuntimeException: Error in configuring object
at org.apache.hadoop.util.ReflectionUtils.setJobConf(ReflectionUtils.java:93)
at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:64)
at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:117)
at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:432)
at org.apache.hadoop.mapred.MapTask.run(MapTask.java:372)
at org.apache.hadoop.mapred.Child$4.run(Child.java:255)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:416)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1121)
at org.apache.hadoop.mapred.Child.main(Child.java:249)
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:622)
at org.apache.hadoop.util.ReflectionUtils.setJobConf(ReflectionUtils.java:88)
... 9 more
Caused by: java.lang.RuntimeException: Error in configuring object
at org.apache.hadoop.util.ReflectionUtils.setJobConf(ReflectionUtils.java:93)
at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:64)
at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:117)
at org.apache.hadoop.mapred.MapRunner.configure(MapRunner.java:34)
... 14 more
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:622)
at org.apache.hadoop.util.ReflectionUtils.setJobConf(ReflectionUtils.java:88)
... 17 more
Caused by: java.lang.RuntimeException: java.lang.NoSuchMethodException: ChainDriver$UpperCaserMapper.<init>()
at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:115)
at org.apache.hadoop.mapred.lib.Chain.configure(Chain.java:330)
at org.apache.hadoop.mapred.lib.ChainMapper.configure(ChainMapper.java:152)
... 22 more
Caused by: java.lang.NoSuchMethodException: ChainDriver$UpperCaserMapper.<init>()
at java.lang.Class.getConstructor0(Class.java:2813)
at java.lang.Class.getDeclaredConstructor(Class.java:2053)
at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:109)
... 24 more

我的完整代码是:
import java.io.IOException;
import java.net.URI;
import java.util.Iterator;
import java.util.StringTokenizer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.mapred.lib.ChainMapper;
import org.apache.hadoop.mapred.lib.ChainReducer;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

/**
 * Driver for a chained MapReduce job (old {@code org.apache.hadoop.mapred} API):
 * TokenizerMapper -> UpperCaserMapper -> WordCountReducer.
 *
 * All mapper/reducer classes below MUST be {@code static}: Hadoop instantiates
 * them reflectively via a no-arg constructor, and a non-static inner class has
 * no such constructor (its implicit constructor takes the enclosing
 * ChainDriver instance). The non-static declaration is exactly what produced
 * {@code java.lang.NoSuchMethodException: ChainDriver$UpperCaserMapper.<init>()}
 * in the stack trace above.
 */
public class ChainDriver extends Configured implements Tool {

    /**
     * First mapper in the chain: splits each input line on whitespace and
     * emits (word, 1) for every token.
     */
    public static class TokenizerMapper extends MapReduceBase implements
            Mapper<LongWritable, Text, Text, IntWritable> {

        private static final IntWritable one = new IntWritable(1);
        // Reused across map() calls to avoid per-record allocation
        // (safe because collect() serializes the value immediately).
        private final Text word = new Text();

        @Override
        public void map(LongWritable key, Text value,
                OutputCollector<Text, IntWritable> output,
                Reporter reporter) throws IOException {
            String line = value.toString();
            System.out.println("Line:" + line);
            StringTokenizer itr = new StringTokenizer(line);
            while (itr.hasMoreTokens()) {
                word.set(itr.nextToken());
                output.collect(word, one);
            }
        }
    }

    /**
     * Second mapper in the chain: receives (word, count) pairs from
     * TokenizerMapper and re-emits them with the word upper-cased.
     */
    public static class UpperCaserMapper extends MapReduceBase implements
            Mapper<Text, IntWritable, Text, IntWritable> {

        @Override
        public void map(Text key, IntWritable value,
                OutputCollector<Text, IntWritable> output,
                Reporter reporter) throws IOException {
            String word = key.toString().toUpperCase();
            System.out.println("Upper Case:" + word);
            output.collect(new Text(word), value);
        }
    }

    /**
     * Reducer: sums the per-word partial counts emitted by the mapper chain.
     */
    public static class WordCountReducer extends MapReduceBase implements
            Reducer<Text, IntWritable, Text, IntWritable> {

        @Override
        public void reduce(Text key, Iterator<IntWritable> values,
                OutputCollector<Text, IntWritable> output,
                Reporter reporter) throws IOException {
            int sum = 0;
            // BUG FIX: the original never consumed the values iterator, so
            // every word was emitted with a count of 0. Accumulate the counts.
            while (values.hasNext()) {
                sum += values.next().get();
            }
            output.collect(key, new IntWritable(sum));
        }
    }

    /** Prints CLI usage and returns the conventional error exit code. */
    static int printUsage() {
        System.out.println("wordcount ");
        ToolRunner.printGenericCommandUsage(System.out);
        return -1;
    }

    /**
     * Configures and submits the chained job.
     *
     * @param args command-line arguments (currently unused; paths are hard-coded)
     * @return 0 on successful job completion
     * @throws Exception if job submission or HDFS access fails
     */
    @Override
    public int run(String[] args) throws Exception {
        JobConf conf = new JobConf(getConf(), ChainDriver.class);
        conf.setJobName("wordcount");

        Path outputPath = new Path("/user/hduser/output");
        FileSystem fs = FileSystem.get(new URI(outputPath.toString()), conf);
        // Remove any stale output directory so the job does not fail with
        // FileAlreadyExistsException. delete(Path, boolean) replaces the
        // deprecated single-argument delete(Path); recursive=true is required
        // to remove a non-empty directory.
        fs.delete(outputPath, true);

        // Input and output paths (hard-coded for this example).
        FileInputFormat.setInputPaths(conf, "/user/hduser/chain");
        FileOutputFormat.setOutputPath(conf, outputPath);

        // Plain-text input and output.
        conf.setInputFormat(TextInputFormat.class);
        conf.setOutputFormat(TextOutputFormat.class);

        // Each addMapper call registers one link of the chain: the global
        // JobConf, the mapper class, its input/output key/value types,
        // byValue=true (pass records by value between links), and a
        // link-local JobConf.
        JobConf mapAConf = new JobConf(false);
        ChainMapper.addMapper(conf, TokenizerMapper.class, LongWritable.class,
                Text.class, Text.class, IntWritable.class, true, mapAConf);

        JobConf mapBConf = new JobConf(false);
        ChainMapper.addMapper(conf, UpperCaserMapper.class, Text.class,
                IntWritable.class, Text.class, IntWritable.class, true,
                mapBConf);

        JobConf reduceConf = new JobConf(false);
        ChainReducer.setReducer(conf, WordCountReducer.class, Text.class,
                IntWritable.class, Text.class, IntWritable.class, true,
                reduceConf);

        JobClient.runJob(conf);
        return 0;
    }

    public static void main(String[] args) throws Exception {
        int res = ToolRunner.run(new Configuration(),
                new ChainDriver(), args);
        System.exit(res);
    }
}

最佳答案

我找到了错误的原因和修复方法：把所有的 mapper 和 reducer 内部类声明为 static（静态）类即可。非静态内部类没有无参构造函数（其隐式构造函数需要外层类实例作为参数），因此 Hadoop 通过反射实例化它们时会抛出 NoSuchMethodException。

关于java - 配置对象时链映射器错误,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/23010764/

24 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com