
Our Hadoop cluster uses Kerberos, so we normally run kinit first and then a command like "hadoop fs -ls /". Now I am trying to log in with JAAS and GSS-API and create a file on the cluster from Java, but it fails. Here is my code:

import java.security.PrivilegedAction;
import javax.security.auth.Subject;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;  
import org.ietf.jgss.*;

public class Client {

    public static void main(String[] args) throws LoginException {
        System.setProperty("sun.security.krb5.debug", "false");
        System.setProperty("java.security.krb5.realm", "H236");
        System.setProperty("java.security.krb5.kdc", "172.16.0.236");
        System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");
        System.setProperty("java.security.auth.login.config",
                "/etc/hadoop/conf/jaas.conf");

        // JAAS login using the "Client" entry from jaas.conf
        LoginContext lc = new LoginContext("Client");
        lc.login();
        System.out.println("Authentication succeeded!");

        Subject subject = lc.getSubject();
        Subject.doAs(subject, new PrivilegedAction<byte[]>() {
            public byte[] run() {
                Configuration conf = new Configuration();
                try {
                    // Obtain Kerberos credentials through GSS-API
                    Oid krb5Mechanism = new Oid("1.2.840.113554.1.2.2");
                    GSSManager manager = GSSManager.getInstance();
                    GSSName clientName = manager.createName("hdfs/172.16.0.239@H236",
                            GSSName.NT_USER_NAME);
                    GSSCredential clientCreds = manager.createCredential(clientName,
                            GSSCredential.DEFAULT_LIFETIME,
                            krb5Mechanism,
                            GSSCredential.INITIATE_ONLY);
                    GSSName serverName = manager.createName("[email protected]",
                            GSSName.NT_HOSTBASED_SERVICE);
                    GSSContext context = manager.createContext(serverName,
                            krb5Mechanism,
                            clientCreds,
                            GSSContext.DEFAULT_LIFETIME);
                    context.requestMutualAuth(true);
                    context.requestConf(false);
                    context.requestInteg(true);
                    System.out.println(clientCreds.getName().toString());
                    System.out.println(clientCreds.getRemainingLifetime());

                    byte[] outToken = context.initSecContext(new byte[0], 0, 0);

                    // Create a file on the Hadoop cluster
                    FileSystem fs = FileSystem.get(conf);
                    Path f = new Path("hdfs:///hdfs/123");
                    FSDataOutputStream s = fs.create(f, true);
                    System.out.println("done\n");
                    for (int i = 0; i < 100; ++i)
                        s.writeChars("test");
                    s.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
                return null;
            } // end of run
        });
    } // end of main
}

The jaas.conf looks like this:

Client {
  com.sun.security.auth.module.Krb5LoginModule required
  debug=true
  storeKey=true
  doNotPrompt=true
  useKeyTab=true
  keyTab="/etc/hadoop/conf/hdfs.keytab"
  principal="hdfs/172.16.0.239@H236";
};

My login user name is root. Before running this code with "hadoop jar ./client.jar", I ran kdestroy to delete the Kerberos ticket cache, and then I got the error below:

Authentication succeeded!
ERROR security.UserGroupInformation: PriviledgedActionException as:root (auth:KERBEROS) cause:javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Attempt to obtain new INITIATE credentials failed! (null))]
ipc.Client: Exception encountered while connecting to the server : javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Attempt to obtain new INITIATE credentials failed! (null))]
ERROR security.UserGroupInformation: PriviledgedActionException as:root (auth:KERBEROS) cause:java.io.IOException: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Attempt to obtain new INITIATE credentials failed! (null))]
WARN retry.RetryInvocationHandler: Exception while invoking class org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create. Not retrying because the invoked method is not idempotent, and unable to determine whether it was invoked
java.io.IOException: Failed on local exception: java.io.IOException: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Attempt to obtain new INITIATE credentials failed! (null))]; Host Details : local host is: "XP236/172.16.0.236"; destination host is: "172.16.0.236":8020; at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:760)

I don't know how to make it work. Can anybody help me? Thanks a lot.

  • Kerberos does not work with IP addresses and never will. First of all, use hostnames, or forget it. Commented May 25, 2013 at 19:01
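Building on that comment, here is a minimal sketch of how the name lookups in the question's code might look with hostname-based principals; namenode.h236.example is a placeholder, substitute the NameNode's real fully qualified hostname (it must resolve consistently in forward and reverse DNS), and change the principal in jaas.conf to match (e.g. principal="hdfs/namenode.h236.example@H236";):

    // Placeholder hostname; replace with the NameNode's actual FQDN.
    GSSName clientName = manager.createName("hdfs/namenode.h236.example@H236",
            GSSName.NT_USER_NAME);
    // Host-based service names use the "service@host" form.
    GSSName serverName = manager.createName("hdfs@namenode.h236.example",
            GSSName.NT_HOSTBASED_SERVICE);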

1 Answer


Please use the code below to access secure Hadoop through a proxy user, similar to how Oozie accesses the cluster. You also need to configure proxy-user support in core-site.xml.
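For reference, the proxy-user settings in core-site.xml on the NameNode would look roughly like the sketch below; this assumes the login principal is hdfs and, for simplicity, allows it to impersonate any user from any host (restrict these values in production):

    <property>
      <name>hadoop.proxyuser.hdfs.hosts</name>
      <value>*</value>
    </property>
    <property>
      <name>hadoop.proxyuser.hdfs.groups</name>
      <value>*</value>
    </property>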

import java.security.PrivilegedExceptionAction;
import java.text.SimpleDateFormat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

public class HdfsTest {

public static void main(String[] args) {
    final Configuration conf = new Configuration();
    try {

        conf.set("fs.defaultFS",
                "hdfs://ibm-r1-node7.ibmbi-nextgen.com:8020");
        UserGroupInformation.setConfiguration(conf);

        // args[0]: the user to impersonate; args[1]: the Kerberos principal
        // whose credentials are read from the ticket cache at /tmp/krb5cc_0
        UserGroupInformation ugi = UserGroupInformation.createProxyUser(
                args[0], UserGroupInformation.getUGIFromTicketCache(
                        "/tmp/krb5cc_0", args[1]));

        System.out.println("--------------status---:"
                + UserGroupInformation.isLoginKeytabBased());

        System.out.println("---------AFTER LOGIN-----:");

        ugi.doAs(new PrivilegedExceptionAction<Void>() {
            public Void run() throws Exception {

                FileSystem fs = FileSystem.get(conf);
                Path path = new Path("hdfs://10.132.100.224:8020/tmp/root");

                FileStatus[] statusArray = fs.listStatus(path);
                System.out.println("------------------------------"
                        + fs.listStatus(path));
                int count = 0;

                SimpleDateFormat sdf = null;
                for (FileStatus status : statusArray) {

                    Long blockSize = status.getBlockSize();

                    String permission = status.getPermission() + "";
                    int replication = status.getReplication();
                    String owner = status.getOwner();
                    String paths = status.getPath() + "";
                    boolean file = status.isFile();
                    Long length = status.getLen();
                    String group = status.getGroup();
                    System.out.println("BlockSize   :" + blockSize);
                    System.out.println("Group   :" + group);
                    System.out.println("Length  :" + length);
                    System.out.println("Owner   :" + owner);
                    System.out.println("Replication :" + replication);
                    System.out.println("File     :" + file);
                    System.out.println("Permission  :" + permission);
                    System.out.println("Path    :" + paths);
                    count++;
                    System.out
                            .println("-------------without auth-----count---------------"
                                    + count);
                }

                return null;
            }
        });

    } catch (Exception e) {
        System.out.println("--------EXCEPTION________________");
        e.printStackTrace();

    }
}

}
