gpt4 book ai didi

hadoop - java.io.IOException: Type mismatch in value from map: expected org.apache.hadoop.io.IntWritable，收到 org.apache.hadoop.io.Text

转载 作者:可可西里 更新时间:2023-11-01 15:16:15 25 4
gpt4 key购买 nike

我在主 block 中配置了 Mapper、reducer 类以及 map 输出键值类。我不明白抛出错误的代码有什么问题 Type mismatch in value from map: expected org.apache.hadoop.io.IntWritable, recieved org.apache.hadoop.io.Text有人可以帮忙吗?谢谢。

代码是:

import java.io.IOException;
import java.lang.String;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Mapper.Context;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.util.StringTokenizer;

public class Alphabet {

public static class AlphabetMapper
extends Mapper<IntWritable, Text, LongWritable, IntWritable>{
private Text word = new Text();

public void map(LongWritable key, Text value, Context context)
throws IOException, InterruptedException{
String line = value.toString();

StringTokenizer tokenizer = new StringTokenizer(line);

while (tokenizer.hasMoreTokens()) {
word.set(tokenizer.nextToken());
context.write( new LongWritable(word.getLength()), new IntWritable(1) );
}

}

}
public static class AlphabetReducer
extends Reducer<LongWritable, IntWritable, LongWritable, IntWritable>{
public void reduce(LongWritable key, Iterable<IntWritable> values, Context context)
throws IOException, InterruptedException {

int sum = 0;

for (IntWritable val : values) {
sum += val.get();
}

context.write( key, new IntWritable(sum) );
}

}
public static void main(String[] args) throws Exception {

if (args.length!=2){
System.err.println("Usage:Alphabet <input path> <output path>");
System.exit(-1);
}

Job job =new Job();
job.setJarByClass(Alphabet.class);
job.setJobName("Word Char Count");

FileInputFormat.addInputPath(job, new Path(args[0]));
FileOutputFormat.setOutputPath(job, new Path(args[1]));

job.setMapperClass(AlphabetMapper.class);
job.setReducerClass(AlphabetReducer.class);

job.setMapOutputKeyClass( LongWritable.class );
job.setMapOutputValueClass( IntWritable.class );

job.setOutputKeyClass(LongWritable.class);
job.setOutputValueClass(IntWritable.class);

System.exit(job.waitForCompletion(true)?0:1);

}

}

最佳答案

如果您使用 FileInputFormat（默认的 TextInputFormat），映射器的默认输入键/值类型是 LongWritable 和 Text，因此 Mapper 的第一个泛型参数应为 LongWritable 而不是 IntWritable。

关于hadoop - java.io.IOException : Type mismatch in value from map: expected org. apache.hadoop.io.IntWritable,收到 org.apache.hadoop.io.Text,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/20918249/

25 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com