
java - Cannot create a file on Hadoop with Java code using a Kerberos ticket


Our Hadoop cluster uses Kerberos, so we first have to run kinit before commands like "hadoop fs -ls /" work. Now I am trying to log in with JAAS and GSS-API and create a file on the cluster, but it fails. Here is my code:

import java.security.PrivilegedAction;
import javax.security.auth.Subject;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.ietf.jgss.*;

public class Client {

    public static void main(String[] args) throws LoginException {
        System.setProperty("sun.security.krb5.debug", "false");
        System.setProperty("java.security.krb5.realm", "H236");
        System.setProperty("java.security.krb5.kdc", "172.16.0.236");
        System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");
        System.setProperty("java.security.auth.login.config",
                "/etc/hadoop/conf/jaas.conf");

        // Log in through JAAS using the "Client" entry in jaas.conf
        LoginContext lc = new LoginContext("Client");
        lc.login();
        System.out.println("Authentication succeeded!");

        Subject subject = lc.getSubject();
        Subject.doAs(subject, new PrivilegedAction<byte[]>() {
            public byte[] run() {
                Configuration conf = new Configuration();
                try {
                    // Acquire GSS-API credentials for the Kerberos mechanism
                    Oid krb5Mechanism = new Oid("1.2.840.113554.1.2.2");
                    GSSManager manager = GSSManager.getInstance();
                    GSSName clientName = manager.createName("hdfs/172.16.0.239@H236",
                            GSSName.NT_USER_NAME);
                    GSSCredential clientCreds = manager.createCredential(clientName,
                            GSSCredential.DEFAULT_LIFETIME,
                            krb5Mechanism,
                            GSSCredential.INITIATE_ONLY);
                    GSSName serverName = manager.createName("hdfs@172.16.0.239",
                            GSSName.NT_HOSTBASED_SERVICE);
                    GSSContext context = manager.createContext(serverName,
                            krb5Mechanism,
                            clientCreds,
                            GSSContext.DEFAULT_LIFETIME);
                    context.requestMutualAuth(true);
                    context.requestConf(false);
                    context.requestInteg(true);
                    System.out.println(clientCreds.getName().toString());
                    System.out.println(clientCreds.getRemainingLifetime());

                    // Obtain an initial GSS token
                    byte[] outToken = context.initSecContext(new byte[0], 0, 0);

                    // Create a file on the Hadoop cluster
                    FileSystem fs = FileSystem.get(conf);
                    Path f = new Path("hdfs:///hdfs/123");
                    FSDataOutputStream s = fs.create(f, true);
                    System.out.println("done\n");
                    for (int i = 0; i < 100; ++i)
                        s.writeChars("test");
                    s.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
                return null;
            } // end of run
        });
    } // end of main
}

jaas.conf looks like this:

Client {
com.sun.security.auth.module.Krb5LoginModule required
debug=true
storeKey=true
doNotPrompt=true
useKeyTab=true
keyTab="/etc/hadoop/conf/hdfs.keytab"
principal="hdfs/172.16.0.239@H236";
};

My login user is root. Before running this code with "hadoop jar ./client.jar", I ran kdestroy to delete the Kerberos cache, and then I get the following error:

Authentication succeeded!
ERROR security.UserGroupInformation: PriviledgedActionException as:root (auth:KERBEROS) cause:javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Attempt to obtain new INITIATE credentials failed! (null))]
ipc.Client: Exception encountered while connecting to the server : javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Attempt to obtain new INITIATE credentials failed! (null))]
ERROR security.UserGroupInformation: PriviledgedActionException as:root (auth:KERBEROS) cause:java.io.IOException: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Attempt to obtain new INITIATE credentials failed! (null))]
WARN retry.RetryInvocationHandler: Exception while invoking class org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create. Not retrying because the invoked method is not idempotent, and unable to determine whether it was invoked
java.io.IOException: Failed on local exception: java.io.IOException: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Attempt to obtain new INITIATE credentials failed! (null))]; Host Details : local host is: "XP236/172.16.0.236"; destination host is: "172.16.0.236":8020;
        at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:760)

I don't know how to make this work. Can anyone help me? Thanks a lot.

Best Answer

Please use the following code to access secure Hadoop through a proxy user, and configure the impersonation in core-site.xml as well... similar to the way Oozie accesses the cluster.
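For reference, here is a minimal core-site.xml sketch of those impersonation grants, assuming the superuser doing the impersonating is root (hadoop.proxyuser.*.hosts and hadoop.proxyuser.*.groups are Hadoop's standard proxy-user properties; narrow the wildcards for production use):

    <property>
      <name>hadoop.proxyuser.root.hosts</name>
      <value>*</value>
    </property>
    <property>
      <name>hadoop.proxyuser.root.groups</name>
      <value>*</value>
    </property>

The NameNode has to pick up this change (a restart, or hdfs dfsadmin -refreshSuperUserGroupsConfiguration) before createProxyUser() calls are accepted. The Java client then impersonates the target user: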

import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

public class HdfsTest {

    public static void main(String[] args) {
        final Configuration conf = new Configuration();
        try {
            conf.set("fs.defaultFS",
                    "hdfs://ibm-r1-node7.ibmbi-nextgen.com:8020");
            UserGroupInformation.setConfiguration(conf);

            // args[0] = user to impersonate, args[1] = principal whose
            // ticket lives in the credential cache /tmp/krb5cc_0
            UserGroupInformation ugi = UserGroupInformation.createProxyUser(
                    args[0], UserGroupInformation.getUGIFromTicketCache(
                            "/tmp/krb5cc_0", args[1]));

            System.out.println("--------------status---:"
                    + UserGroupInformation.isLoginKeytabBased());
            System.out.println("---------AFTER LOGIN-----:");

            ugi.doAs(new PrivilegedExceptionAction<Void>() {
                public Void run() throws Exception {
                    FileSystem fs = FileSystem.get(conf);
                    Path path = new Path("hdfs://10.132.100.224:8020/tmp/root");

                    FileStatus[] statusArray = fs.listStatus(path);
                    System.out.println("------------------------------"
                            + fs.listStatus(path));
                    int count = 0;

                    // Print the metadata of every entry under the path
                    for (FileStatus status : statusArray) {
                        long blockSize = status.getBlockSize();
                        String permission = status.getPermission() + "";
                        int replication = status.getReplication();
                        String owner = status.getOwner();
                        String paths = status.getPath() + "";
                        boolean file = status.isFile();
                        long length = status.getLen();
                        String group = status.getGroup();
                        System.out.println("BlockSize   :" + blockSize);
                        System.out.println("Group       :" + group);
                        System.out.println("Length      :" + length);
                        System.out.println("Owner       :" + owner);
                        System.out.println("Replication :" + replication);
                        System.out.println("File        :" + file);
                        System.out.println("Permission  :" + permission);
                        System.out.println("Path        :" + paths);
                        count++;
                        System.out.println("-------------without auth-----count---------------"
                                + count);
                    }
                    return null;
                }
            });
        } catch (Exception e) {
            System.out.println("--------EXCEPTION________________");
            e.printStackTrace();
        }
    }
}
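For completeness: the keytab in the question makes an even simpler route possible. Hadoop's RPC layer obtains its Kerberos credentials through UserGroupInformation, so the GSSContext built by hand in the question is never actually consumed by the create() call; that is why the JAAS login succeeds but the RPC still fails. Logging in through UGI from the keytab avoids jaas.conf and GSS-API entirely. A minimal sketch, reusing the principal and paths from the question (the class name KeytabClient is illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

public class KeytabClient {

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Tell the client the cluster is Kerberized; normally this comes
        // from core-site.xml on the classpath. Realm/KDC resolution is
        // assumed to come from /etc/krb5.conf (or set the
        // java.security.krb5.* system properties as in the question).
        conf.set("hadoop.security.authentication", "kerberos");
        UserGroupInformation.setConfiguration(conf);

        // Log in directly from the keytab; UGI caches the credentials and
        // the IPC layer uses them for every subsequent RPC.
        UserGroupInformation.loginUserFromKeytab(
                "hdfs/172.16.0.239@H236", "/etc/hadoop/conf/hdfs.keytab");

        // Same file-creation logic as in the question
        FileSystem fs = FileSystem.get(conf);
        Path f = new Path("hdfs:///hdfs/123");
        try (FSDataOutputStream out = fs.create(f, true)) {
            for (int i = 0; i < 100; ++i)
                out.writeChars("test");
        }
        System.out.println("done");
    }
}

After loginUserFromKeytab() returns, every FileSystem operation in the process authenticates as that principal; no kinit and no ticket cache are needed.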

Regarding "java - Cannot create a file on Hadoop with Java code using a Kerberos ticket", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/16474649/
