
hadoop - Hadoop multiple input files error


I am trying to read 2 files from HDFS input with the code below, but I am running into the error shown further down.
I am a beginner in MapReduce programming and have been stuck on this problem for several days; any help would be appreciated.

My code:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.mapreduce.lib.input.MultipleInputs;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;

public class Recipe {

    public static class TokenizerMapper1
            extends Mapper<Object, Text, Text, IntWritable> {

        private final static IntWritable one = new IntWritable(1);
        private Text word = new Text();

        public void map(Object key, Text value, Context context
                ) throws IOException, InterruptedException {
            String line = value.toString();
            // Emit characters at index 2..7 of each line as the key, with a count of 1.
            word.set(line.substring(2, 8));
            context.write(word, one);
        }
    }

    public static class TokenizerMapper2
            extends Mapper<Object, Text, Text, IntWritable> {

        private final static IntWritable one = new IntWritable(1);
        private Text word = new Text();

        public void map(Object key, Text value, Context context
                ) throws IOException, InterruptedException {
            String line = value.toString();
            word.set(line.substring(2, 8));
            context.write(word, one);
        }
    }

    public static class IntSumReducer
            extends Reducer<Text, IntWritable, Text, IntWritable> {

        private IntWritable result = new IntWritable();

        public void reduce(Text key, Iterable<IntWritable> values,
                Context context) throws IOException, InterruptedException {
            // Sum the counts for each key.
            int sum = 0;
            for (IntWritable val : values) {
                sum += val.get();
            }
            result.set(sum);
            context.write(key, result);
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();

        if (otherArgs.length != 2) {
            System.err.println("Usage: recipe <in> <out>");
            System.exit(2);
        }
        @SuppressWarnings("deprecation")
        Job job = new Job(conf, "Recipe");

        job.setJarByClass(Recipe.class);
        job.setMapperClass(TokenizerMapper1.class);
        job.setMapperClass(TokenizerMapper2.class);
        job.setCombinerClass(IntSumReducer.class);
        job.setReducerClass(IntSumReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        MultipleInputs.addInputPath(job, new Path(args[0]), TextInputFormat.class, TokenizerMapper1.class);
        MultipleInputs.addInputPath(job, new Path(args[1]), TextInputFormat.class, TokenizerMapper2.class);
        FileOutputFormat.setOutputPath(job, new Path(args[2]));
        //FileInputFormat.addInputPath(job, new Path("hdfs://localhost:9000/in"));
        //FileOutputFormat.setOutputPath(job, new Path("hdfs://127.0.0.1:9000/out"));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
        // job.submit();
    }
}

And I have set the program's run configuration arguments as follows:

/input
Error:

Exception in thread "main" java.lang.ArrayIndexOutOfBoundsException: 2
    at Recipe.main(Recipe.java:121)

Best Answer

There are a couple of problems. The program expects 3 arguments (two input paths and one output path), but you are passing only 2. Also, when each input needs its own mapper, you have to wire them up with MultipleInputs.

Assuming you invoke the program with /in1 /in2 /out:

    MultipleInputs.addInputPath(job, new Path(args[0]), TextInputFormat.class, TokenizerMapper1.class);
    MultipleInputs.addInputPath(job, new Path(args[1]), TextInputFormat.class, TokenizerMapper2.class);
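
Since the job now takes three paths, the length check and the output-path index in main() have to match as well; a minimal sketch of the affected lines (note the driver should consistently use otherArgs, the output of GenericOptionsParser, rather than the raw args):

    // Expect two input paths and one output path.
    if (otherArgs.length != 3) {
        System.err.println("Usage: recipe <in1> <in2> <out>");
        System.exit(2);
    }
    ...
    FileOutputFormat.setOutputPath(job, new Path(otherArgs[2]));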

You can then remove these lines from your code:

    job.setMapperClass(TokenizerMapper1.class);
    job.setMapperClass(TokenizerMapper2.class);
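
With those changes in place, a run would look something like this (the jar name and paths here are illustrative):

    hadoop jar recipe.jar Recipe /in1 /in2 /out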

Regarding hadoop - Hadoop multiple input files error, we found a similar question on Stack Overflow: https://stackoverflow.com/questions/34212978/
