Initializing HDFS
Function Description
Before using the APIs provided by HDFS, initialize HDFS. The process is as follows:
- Load the HDFS service configuration files and perform Kerberos security authentication.
- After the authentication succeeds, instantiate a FileSystem object.

Prepare the keytab file required for Kerberos security authentication in advance.
Sample Code
The following is a code snippet. For the complete code, see the HdfsExample class in com.huawei.bigdata.hdfs.examples.
The initialization code is the same for applications running on a Linux client and in a Windows environment. The sample code is as follows:
// Complete the initialization and authentication.
confLoad();
authentication();
// Create an instance.
HdfsExample hdfs_examples = new HdfsExample("/user/hdfs-examples", "test.txt");
/**
 * If the program runs on Linux, change the paths of core-site.xml and hdfs-site.xml
 * to the absolute paths of the client files on Linux.
 */
private static void confLoad() throws IOException {
conf = new Configuration();
// Load the client configuration files.
conf.addResource(new Path(PATH_TO_HDFS_SITE_XML));
conf.addResource(new Path(PATH_TO_CORE_SITE_XML));
// conf.addResource(new Path(PATH_TO_SMALL_SITE_XML));
}
/**
 * Security authentication.
 */
private static void authentication() throws IOException {
// security mode
if ("kerberos".equalsIgnoreCase(conf.get("hadoop.security.authentication"))) {
System.setProperty("java.security.krb5.conf", PATH_TO_KRB5_CONF);
LoginUtil.login(PRINCIPAL_NAME, PATH_TO_KEYTAB, PATH_TO_KRB5_CONF, conf);
}
}
/**
 * Create an instance.
 */
public HdfsExample(String path, String fileName) throws IOException {
this.DEST_PATH = path;
this.FILE_NAME = fileName;
instanceBuild();
}
private void instanceBuild() throws IOException {
fSystem = FileSystem.get(conf);
}
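The snippet above references fields and constants (conf, fSystem, PATH_TO_HDFS_SITE_XML, PATH_TO_CORE_SITE_XML, PATH_TO_KRB5_CONF, PATH_TO_KEYTAB, PRINCIPAL_NAME) that are defined elsewhere in the HdfsExample class. The following self-contained sketch shows how the same initialization sequence could be wired together and verified with a simple exists() call. The file paths, the principal name, and the class name HdfsInitSketch are illustrative assumptions, not the shipped example code.
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import com.huawei.hadoop.security.LoginUtil;

public class HdfsInitSketch {
    // The paths and the principal below are assumptions for illustration;
    // replace them with the client files and account of your own cluster.
    private static final String PATH_TO_HDFS_SITE_XML = "/opt/client/conf/hdfs-site.xml";
    private static final String PATH_TO_CORE_SITE_XML = "/opt/client/conf/core-site.xml";
    private static final String PATH_TO_KRB5_CONF = "/opt/client/conf/krb5.conf";
    private static final String PATH_TO_KEYTAB = "/opt/client/conf/user.keytab";
    private static final String PRINCIPAL_NAME = "developuser";

    public static void main(String[] args) throws IOException {
        // Load the client configuration files.
        Configuration conf = new Configuration();
        conf.addResource(new Path(PATH_TO_HDFS_SITE_XML));
        conf.addResource(new Path(PATH_TO_CORE_SITE_XML));

        // Authenticate only when the cluster runs in security mode.
        if ("kerberos".equalsIgnoreCase(conf.get("hadoop.security.authentication"))) {
            System.setProperty("java.security.krb5.conf", PATH_TO_KRB5_CONF);
            LoginUtil.login(PRINCIPAL_NAME, PATH_TO_KEYTAB, PATH_TO_KRB5_CONF, conf);
        }

        // Instantiate FileSystem and run a trivial call to confirm the client works.
        FileSystem fSystem = FileSystem.get(conf);
        System.out.println("Directory exists: " + fSystem.exists(new Path("/user/hdfs-examples")));
    }
}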
Run the login sample code in both Windows and Linux environments; it is used for the first login. For the complete code, see the LoginUtil class in com.huawei.hadoop.security.
public synchronized static void login(String userPrincipal, String userKeytabPath, String krb5ConfPath, Configuration conf)
    throws IOException {
    // 1. Check the input parameters.
    if ((userPrincipal == null) || (userPrincipal.length() <= 0)) {
        LOG.error("input userPrincipal is invalid.");
        throw new IOException("input userPrincipal is invalid.");
    }

    if ((userKeytabPath == null) || (userKeytabPath.length() <= 0)) {
        LOG.error("input userKeytabPath is invalid.");
        throw new IOException("input userKeytabPath is invalid.");
    }

    if ((krb5ConfPath == null) || (krb5ConfPath.length() <= 0)) {
        LOG.error("input krb5ConfPath is invalid.");
        throw new IOException("input krb5ConfPath is invalid.");
    }

    if ((conf == null)) {
        LOG.error("input conf is invalid.");
        throw new IOException("input conf is invalid.");
    }

    // 2. Check whether the files exist.
    File userKeytabFile = new File(userKeytabPath);
    if (!userKeytabFile.exists()) {
        LOG.error("userKeytabFile(" + userKeytabFile.getAbsolutePath() + ") does not exist.");
        throw new IOException("userKeytabFile(" + userKeytabFile.getAbsolutePath() + ") does not exist.");
    }
    if (!userKeytabFile.isFile()) {
        LOG.error("userKeytabFile(" + userKeytabFile.getAbsolutePath() + ") is not a file.");
        throw new IOException("userKeytabFile(" + userKeytabFile.getAbsolutePath() + ") is not a file.");
    }

    File krb5ConfFile = new File(krb5ConfPath);
    if (!krb5ConfFile.exists()) {
        LOG.error("krb5ConfFile(" + krb5ConfFile.getAbsolutePath() + ") does not exist.");
        throw new IOException("krb5ConfFile(" + krb5ConfFile.getAbsolutePath() + ") does not exist.");
    }
    if (!krb5ConfFile.isFile()) {
        LOG.error("krb5ConfFile(" + krb5ConfFile.getAbsolutePath() + ") is not a file.");
        throw new IOException("krb5ConfFile(" + krb5ConfFile.getAbsolutePath() + ") is not a file.");
    }

    // 3. Set and check the krb5 configuration.
    setKrb5Config(krb5ConfFile.getAbsolutePath());
    setConfiguration(conf);

    // 4. Log in to Hadoop and check.
    loginHadoop(userPrincipal, userKeytabFile.getAbsolutePath());
    LOG.info("Login success!!!!!!!!!!!!!!");
}
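The login() method above calls three private helpers (setKrb5Config, setConfiguration, and loginHadoop) whose bodies are not shown in this snippet. The sketch below is only an assumption of how such helpers are typically implemented with the standard org.apache.hadoop.security.UserGroupInformation API; for the authoritative code, see the LoginUtil class in com.huawei.hadoop.security.
// Minimal sketch of the helpers used by login(); based on the standard Hadoop
// UserGroupInformation API, not the shipped LoginUtil source.
// Requires: java.io.IOException, org.apache.hadoop.conf.Configuration,
//           org.apache.hadoop.security.UserGroupInformation
private static void setKrb5Config(String krb5ConfFile) throws IOException {
    // Point the JVM Kerberos implementation at the cluster's krb5.conf.
    System.setProperty("java.security.krb5.conf", krb5ConfFile);
}

private static void setConfiguration(Configuration conf) throws IOException {
    // Make Hadoop security aware of the loaded client configuration.
    UserGroupInformation.setConfiguration(conf);
}

private static void loginHadoop(String principal, String keytabFile) throws IOException {
    // Perform the keytab-based Kerberos login for the given principal.
    UserGroupInformation.loginUserFromKeytab(principal, keytabFile);
}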
Parent topic: Developing an HDFS Application