美文网首页
Hadoop开启kerberos后, Java Api访问Hd

Hadoop开启kerberos后, Java Api访问Hd

作者: 清蒸三文鱼_ | 来源:发表于2024-04-07 18:04 被阅读0次

简述

hdfs和yarn开启了kerberos, 且都开启了Console Http的认证, http认证使用的是Spnego, 使用HttpClient来进行支持, 程序建议在Linux下运行, Windows下可能会报错; 如何开启认证, 具体参考Hadoop开启kerberos

pom.xml

 <dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-client</artifactId>
    <version>3.3.0</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>3.3.0</version>
    <exclusions>
        <exclusion>
            <groupId>javax.servlet</groupId>
            <artifactId>servlet-api</artifactId>
        </exclusion>
    </exclusions>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-hdfs</artifactId>
    <version>3.3.0</version>
</dependency>

Hdfs

运行程序时需要执行kinit -kt hdfs.keytab hdfs

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.AnnotatedSecurityInfo;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;

import java.security.PrivilegedExceptionAction;

/**
 * Connects to a Kerberos-secured HDFS and obtains a {@link FileSystem} handle.
 *
 * <p>Expects {@code krb5.conf} and {@code hdfs.keytab} under {@code /tmp/}.
 * The NameNode address ({@code hdfs://xxx:8020}) is a placeholder — replace
 * with the real host before running.
 */
public class HdfsMain {
    public static void main(String[] args) throws Exception {
        String localDir = "/tmp/";
        String userName = "hdfs/hdfs@HADOOP.COM";
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://xxx:8020");
        conf.set("hadoop.security.authentication", "kerberos");
        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");

        // Must point at the Kerberos client config before any login attempt.
        System.setProperty("java.security.krb5.conf", localDir + "krb5.conf");
        UserGroupInformation.setConfiguration(conf);
        // FIX: loginUserFromKeytabAndReturnUGI does NOT set the static login user,
        // so the original code's UserGroupInformation.getLoginUser() fell back to the
        // ticket cache (i.e. only worked because of an external `kinit`). Capture the
        // returned UGI and run the filesystem access as that identity instead.
        UserGroupInformation ugi =
                UserGroupInformation.loginUserFromKeytabAndReturnUGI(userName, localDir + "hdfs.keytab");
        SecurityUtil.setSecurityInfoProviders(new AnnotatedSecurityInfo());
        // try-with-resources: close the FileSystem (and its RPC connections) on exit.
        try (FileSystem fs = ugi.doAs(
                (PrivilegedExceptionAction<FileSystem>) () -> FileSystem.get(conf))) {
            // Example use: list the root directory.
            for (FileStatus status : fs.listStatus(new Path("/"))) {
                System.out.println(status.getPath());
            }
        }
    }
}

Yarn

运行程序时需要执行kinit -kt hdfs.keytab hdfs

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.AnnotatedSecurityInfo;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.client.api.YarnClient;

import java.util.EnumSet;
import java.util.List;

public class YarnMain {

    public static void main(String[] args) throws Exception {
        String local_dir = "/tmp/";
        Configuration conf = new Configuration();
        conf.addResource(new Path(local_dir + "core-site.xml");
        conf.addResource(new Path(local_dir + "yarn-site.xml"));
        conf.addResource(new Path(local_dir + "hdfs-site.xml"));
        SecurityUtil.setSecurityInfoProviders(new AnnotatedSecurityInfo());
        System.setProperty("java.security.krb5.conf", local_dir + "krb5.conf");
        conf.set("hadoop.security.authentication", "kerberos");
        UserGroupInformation.setConfiguration(conf);
        UserGroupInformation.loginUserFromKeytabAndReturnUGI("hdfs@HADOOP.COM", local_dir + "hdfs.keytab");
        YarnClient yarnClient = YarnClient.createYarnClient();
        yarnClient.init(conf);
        yarnClient.start();
        List<ApplicationReport> appReports = yarnClient.getApplications(EnumSet.of(YarnApplicationState.RUNNING));
        for (ApplicationReport report : appReports) {
            System.out.println("Application ID: " + report.getApplicationId());
        }
    }
}

Yarn Http Spnego

  • KerberosJaasConfig
import com.sun.security.auth.module.Krb5LoginModule;
import javax.security.auth.login.AppConfigurationEntry;
import java.util.HashMap;
import java.util.Map;

/**
 * Programmatic JAAS configuration for a keytab-based Kerberos login, equivalent
 * to a {@code Krb5LoginModule required useKeyTab=true ...} entry in a jaas.conf
 * file but without needing an external file.
 *
 * <p>FIX: the superclass is fully qualified — the snippet's imports do not bring
 * in {@code javax.security.auth.login.Configuration}, and a bare
 * {@code Configuration} would also collide with Hadoop's class of the same name
 * when pasted next to the other examples.
 */
public class KerberosJaasConfig extends javax.security.auth.login.Configuration {
    private final String principal;
    private final String keytabPath;

    /**
     * @param principal  Kerberos principal, e.g. {@code yarn@HADOOP.COM}
     * @param keytabPath absolute path to the keytab holding that principal's key
     */
    public KerberosJaasConfig(String principal, String keytabPath) {
        this.principal = principal;
        this.keytabPath = keytabPath;
    }

    /**
     * Returns the same single Krb5LoginModule entry for every application name,
     * so any {@code LoginContext} name works with this configuration.
     */
    @Override
    public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
        Map<String, String> options = new HashMap<>();
        options.put("principal", principal);
        options.put("keyTab", keytabPath);
        options.put("useKeyTab", "true");     // authenticate from the keytab, not the ticket cache
        options.put("storeKey", "true");      // keep the key in the Subject for later GSS use
        options.put("doNotPrompt", "true");   // never fall back to interactive password prompts
        options.put("debug", "false");
        return new AppConfigurationEntry[]{
                new AppConfigurationEntry(
                        Krb5LoginModule.class.getName(),
                        AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
                        options)
        };
    }
}
  • SpnegoMain
import org.apache.http.HttpEntity;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.Credentials;
import org.apache.http.auth.KerberosCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;

import javax.security.auth.Subject;
import javax.security.auth.login.LoginContext;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.security.PrivilegedAction;

/**
 * Fetches a SPNEGO-protected YARN web page ({@code /cluster}) using HttpClient.
 *
 * <p>Logs in via JAAS from a keytab, then executes the HTTP request inside
 * {@link Subject#doAs} so HttpClient's SPNEGO scheme can pick the Kerberos
 * credentials out of the thread's access-control context.
 */
public class SpnegoMain {
    public static void main(String[] args) throws Exception {
        String localDir = "/tmp/";
        // FIX: original concatenated "/tmp/" + "/yarn.keytab", yielding "//" in the path.
        KerberosJaasConfig config = new KerberosJaasConfig("yarn@HADOOP.COM", localDir + "yarn.keytab");
        // The application name is irrelevant: KerberosJaasConfig returns the same
        // entry for every name.
        LoginContext loginContext = new LoginContext("sp-client", null, null, config);
        loginContext.login();
        Subject subject = loginContext.getSubject();
        // FIX: client and response are now closed (the original leaked both).
        try (CloseableHttpClient httpClient = HttpClients.custom()
                .setDefaultCredentialsProvider(new GssCredentialsProvider())
                .build()) {
            HttpGet httpGet = new HttpGet("http://xxx:8088/cluster");
            try (CloseableHttpResponse response = Subject.doAs(subject,
                    (PrivilegedAction<CloseableHttpResponse>) () -> {
                        try {
                            return httpClient.execute(httpGet);
                        } catch (IOException e) {
                            // PrivilegedAction cannot throw checked exceptions; keep the cause.
                            throw new RuntimeException(e);
                        }
                    })) {
                HttpEntity entity = response.getEntity();
                String content = EntityUtils.toString(entity, StandardCharsets.UTF_8);
                System.out.println(content);
            }
        } finally {
            // Release the Kerberos credentials held by the Subject.
            loginContext.logout();
        }
    }

    /**
     * Credentials provider that hands HttpClient's SPNEGO scheme a
     * {@link KerberosCredentials} with a null GSS credential, which tells the
     * GSS layer to use the credentials of the current JAAS Subject
     * (the one active inside {@code Subject.doAs}).
     */
    public static class GssCredentialsProvider implements CredentialsProvider {
        @Override
        public void setCredentials(AuthScope authScope, Credentials credentials) {
            // Intentionally empty: credentials always come from the JAAS Subject.
        }

        @Override
        public Credentials getCredentials(AuthScope authScope) {
            return new KerberosCredentials(null);
        }

        @Override
        public void clear() {
            // Nothing stored, nothing to clear.
        }
    }
}

上面的程序也可以直接在Windows下的IDE执行, 期间可能会报错 java.net.PortUnreachableException: ICMP Port Unreachable, 这个可能与hosts表有关, 需要检查是否存在域名冲突

log4j.properties

log4j.rootLogger=INFO, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.Target=System.out
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n

相关文章

网友评论

      本文标题:Hadoop开启kerberos后, Java Api访问Hd

      本文链接:https://www.haomeiwen.com/subject/lthtxjtx.html