hadoop - NoSuchMethodException org.apache.hadoop.yarn.api.records.URL.fromURI


I am trying to read data from an HBase table, do some processing on it, and store the result in another table, using the following code:

package analysis;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;

public class Author_ref {

    public static class MyMapper extends TableMapper<Text, Text> {

        @Override
        public void map(ImmutableBytesWritable row, Result value, Context context)
                throws IOException, InterruptedException {
            String key = new String(row.get());
            String values = new String(value.getValue(Bytes.toBytes("authors"), Bytes.toBytes("authors")));
            // Strip the surrounding brackets. replace() does literal substitution;
            // replaceAll("[", "") would throw a PatternSyntaxException, because
            // replaceAll() treats its first argument as a regex and "[" alone is invalid.
            String cleanValues = values.replace("[", "").replace("]", "");
            String[] authors = cleanValues.trim().split(",");

            // Emit one (author, paper-key) pair per author.
            for (String author : authors) {
                context.write(new Text(author), new Text(key));
            }
        }
    }

    public static class MyReducer extends TableReducer<Text, Text, ImmutableBytesWritable> {

        @Override
        public void reduce(Text author, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            StringBuilder papers = new StringBuilder();
            for (Text x : values) {
                papers.append(",").append(x.toString());
            }
            // Text.getBytes() returns the (possibly oversized) backing array, so
            // convert via toString() to get exactly the row-key bytes.
            Put p = new Put(Bytes.toBytes(author.toString()));
            p.addColumn(Bytes.toBytes("papers_writen"), Bytes.toBytes("papers_writen"),
                    Bytes.toBytes(papers.toString()));
            context.write(null, p);
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration config = HBaseConfiguration.create();
        Job job = Job.getInstance(config, "ExampleSummary");

        Scan scan = new Scan();
        scan.setCaching(500);       // 1 is the default in Scan, which is bad for MapReduce jobs
        scan.setCacheBlocks(false); // don't pollute the block cache with a full scan

        job.setJarByClass(Author_ref.class); // class that contains mapper and reducer
        TableMapReduceUtil.initTableMapperJob(
                "Dataset",       // input table
                scan,            // Scan instance to control CF and attribute selection
                MyMapper.class,  // mapper class
                Text.class,      // mapper output key
                Text.class,      // mapper output value
                job);
        TableMapReduceUtil.initTableReducerJob(
                "Author_paper",  // output table
                MyReducer.class, // reducer class
                job);

        job.setNumReduceTasks(1); // at least one, adjust as required

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
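
The bracket-stripping step in the mapper is easy to get wrong, so here is a minimal standalone sketch (assuming author cells look like "[alice, bob, carol]", which is a hypothetical value) showing why replace() is used rather than replaceAll(): replaceAll() interprets its first argument as a regular expression, and a lone "[" is invalid regex syntax.

public class BracketStripDemo {
    public static void main(String[] args) {
        String raw = "[alice, bob, carol]"; // hypothetical cell value
        // replace() substitutes literal strings, so brackets are safe to strip
        String cleaned = raw.replace("[", "").replace("]", "");
        for (String author : cleaned.trim().split(",")) {
            System.out.println(author.trim());
        }
        // raw.replaceAll("[", "") would throw java.util.regex.PatternSyntaxException,
        // because "[" opens an unterminated character class
    }
}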

I get the following error:

Exception in thread "main" java.lang.NoSuchMethodError: org.apache.hadoop.yarn.api.records.URL.fromURI(Ljava/net/URI;)Lorg/apache/hadoop/yarn/api/records/URL;
	at org.apache.hadoop.mapreduce.v2.util.LocalResourceBuilder.createLocalResources(LocalResourceBuilder.java:144)
	at org.apache.hadoop.mapreduce.v2.util.MRApps.setupDistributedCache(MRApps.java:531)
	at org.apache.hadoop.mapred.LocalDistributedCacheManager.setup(LocalDistributedCacheManager.java:92)
	at org.apache.hadoop.mapred.LocalJobRunner$Job.<init>(LocalJobRunner.java:171)
	at org.apache.hadoop.mapred.LocalJobRunner.submitJob(LocalJobRunner.java:760)
	at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:253)
	at org.apache.hadoop.mapreduce.Job$11.run(Job.java:1570)
	at org.apache.hadoop.mapreduce.Job$11.run(Job.java:1567)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:422)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1889)
	at org.apache.hadoop.mapreduce.Job.submit(Job.java:1567)
	at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1588)
	at analysis.Author_ref.main(Author_ref.java:111)

I am using Hadoop 2.9 and HBase 1.2.6.1.

Best Answer

Hadoop 2.9 and HBase 1.2.x are not compatible. Have a look at this:

http://hbase.apache.org/book.html#basic.prerequisites

You have to use compatible versions. HBase 1.2.x is built against the Hadoop 2.5 line, and URL.fromURI(java.net.URI) does not exist in that era's hadoop-yarn-api; when the older YARN jar pulled in by HBase shadows your Hadoop 2.9 classes on the classpath, job submission fails with exactly this NoSuchMethodError.
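
If it is unclear which jars the job is actually running against, one quick diagnostic is to print the version strings that both projects embed in their artifacts. This is only a sketch, using the VersionInfo utilities that Hadoop and HBase each ship; a mismatch between the version you compiled against and what prints here is a strong hint that an incompatible jar is on the classpath.

import org.apache.hadoop.util.VersionInfo;

// Prints the Hadoop and HBase versions found on the runtime classpath.
public class VersionCheck {
    public static void main(String[] args) {
        // Hadoop's version, from hadoop-common
        System.out.println("Hadoop on classpath: " + VersionInfo.getVersion());
        // HBase's version, from hbase-common (fully qualified to avoid the name clash)
        System.out.println("HBase on classpath:  "
                + org.apache.hadoop.hbase.util.VersionInfo.getVersion());
    }
}

At the time of the question, the prerequisites table listed Hadoop 2.4.x through 2.7.x as the supported range for HBase 1.2.x, so the practical fix is to either drop Hadoop back to 2.7.x or move to an HBase release line that the matrix lists as supporting Hadoop 2.9.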

Regarding hadoop - NoSuchMethodException org.apache.hadoop.yarn.api.records.URL.fromURI, we found a similar question on Stack Overflow: https://stackoverflow.com/questions/51625899/
