
HDFS API Operations

This article walks through common HDFS API operations. It should be a handy reference for solving related programming problems; if that's what you need, follow along!

1. Import the Dependency JARs

In File -> Project Structure -> Modules -> Dependencies, import the relevant JARs from Hadoop's share directory (Maven users: see the sketch after this list), including:

under common: hadoop-common-3.2.1.jar and hadoop-nfs-3.2.1.jar, plus all JARs under common/lib

under hdfs: all JARs, plus everything under hdfs/lib
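If the project is built with Maven rather than by hand-importing JARs, the equivalent client dependencies can be declared in pom.xml instead. A minimal sketch (hadoop-client bundles the common and HDFS client JARs; the JUnit dependency is assumed because the code below uses @Test):

<dependencies>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>3.2.1</version>
    </dependency>
    <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <version>4.13.2</version>
        <scope>test</scope>
    </dependency>
</dependencies>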


2. API Operations

All of the operations below are written as JUnit test methods against the NameNode at hdfs://192.168.253.128:9000, so each one can be run individually from the IDE.

package hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;
import org.junit.Test;

import java.io.*;
import java.net.URI;

public class HdfsApi {
/**
 * Create a directory. Verify with: ./hadoop fs -ls /
 * @throws IOException
 */
@Test
public void createFolder() throws IOException {
    Configuration conf=new Configuration();
    FileSystem fs=FileSystem.get(URI.create("hdfs://192.168.253.128:9000"),conf);
    Path path=new Path("/test2");
    fs.mkdirs(path, FsPermission.getDirDefault());
    System.out.println("finished");
}

/**
 * Create a file and write a few bytes to it. Verify with: ./hadoop fs -ls /test2/
 * @throws IOException
 */
@Test
public void createFile() throws IOException {
    Configuration conf=new Configuration();
    FileSystem fs=FileSystem.get(URI.create("hdfs://192.168.253.128:9000"),conf);
    Path path=new Path("/test2/a1.txt");
    FSDataOutputStream out = fs.create(path);
    out.write("11111".getBytes());
    out.close(); // close the stream so the data is actually flushed to HDFS

    System.out.println("finished");
}

/**
 * Rename a file. Verify with: ./hadoop fs -ls /test2/
 * @throws IOException
 */
@Test
public void renameFile() throws IOException {
    Configuration conf=new Configuration();
    FileSystem fs=FileSystem.get(URI.create("hdfs://192.168.253.128:9000"),conf);
    Path path=new Path("/test2/a1.txt");
    Path path2=new Path("/test2/a2.txt");
    boolean result = fs.rename(path, path2);

    System.out.println(result);
}

/**
 * Upload a local file to HDFS. Verify with: ./hadoop fs -ls /test2/
 * @throws IOException
 */
@Test
public void uploadFile() throws IOException {
    Configuration conf=new Configuration();
    FileSystem fs=FileSystem.get(URI.create("hdfs://192.168.253.128:9000"),conf);
    Path path=new Path("F:\\5、开发语言\\java\\it.cast\\knowledge\\src\\main\\resources\\1.txt");
    Path path2=new Path("/test2/a3.txt");
    fs.copyFromLocalFile(path,path2);

    System.out.println("finished.");
}

/**
 * Upload a file, variant 2: report progress through a callback.
 * @throws IOException
 */
@Test
public void uploadFile2() throws IOException {
    Configuration conf=new Configuration();
    FileSystem fs=FileSystem.get(URI.create("hdfs://192.168.253.128:9000"),conf);
    InputStream in =new BufferedInputStream(new FileInputStream(new File("D:\\software\\CodeSmithGeneratoTemplates.7z")));
    FSDataOutputStream out=fs.create(new Path("/test2/a4.7z"),new Progressable(){
        @Override
        public void progress(){
            System.out.println("...");
        }
    });
    IOUtils.copyBytes(in,out,4096,true); // true: close both streams when the copy completes

    System.out.println("finished.");
}

/**
 * List the contents of a directory. Verify with: ./hadoop fs -ls /test2/
 * @throws IOException
 */
@Test
public void getFileList() throws IOException {
    Configuration conf=new Configuration();
    FileSystem fs=FileSystem.get(URI.create("hdfs://192.168.253.128:9000"),conf);
    Path path=new Path("/test2");
    FileStatus[] fileStatuses = fs.listStatus(path);
    for (FileStatus fileStatus:fileStatuses) {
        System.out.println(fileStatus.getPath());
    }
}

/**
 * Print the hosts that hold each block of a file.
 * @throws IOException
 */
@Test
public void getFileBlock() throws IOException {
    Configuration conf=new Configuration();
    FileSystem fs=FileSystem.get(URI.create("hdfs://192.168.253.128:9000"),conf);
    Path path=new Path("/test2/a4.7z");
    FileStatus fileStatus=fs.getFileStatus(path);
    BlockLocation[] fileBlockLocations = fs.getFileBlockLocations(fileStatus, 0, fileStatus.getLen());
    for(BlockLocation loc:fileBlockLocations){
        for(String host:loc.getHosts()){
            System.out.println(host);
        }
    }
}
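/**
 * Read a file back from HDFS and print it to stdout. A minimal extra sketch:
 * it assumes /test2/a1.txt exists (e.g. created by createFile above).
 * Shell equivalent: ./hadoop fs -cat /test2/a1.txt
 * @throws IOException
 */
@Test
public void readFile() throws IOException {
    Configuration conf=new Configuration();
    FileSystem fs=FileSystem.get(URI.create("hdfs://192.168.253.128:9000"),conf);
    FSDataInputStream in=fs.open(new Path("/test2/a1.txt"));
    IOUtils.copyBytes(in,System.out,4096,false); // false: don't close System.out
    in.close();
}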

}

3. Problems During Creation

1. Permission denied

If the client reports a permission error when creating files or directories, a quick fix for a test environment is to disable HDFS permission checking in hdfs-site.xml on the NameNode (restart HDFS afterwards):

vim hdfs-site.xml

<configuration>
    <property>
        <name>dfs.permissions.enabled</name>
        <value>false</value>
    </property>
</configuration>
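Alternatively, rather than disabling permission checking cluster-wide, the client can connect as a user with write access to the target directory. A minimal sketch, assuming that user is root (substitute whichever user owns the path):

FileSystem fs = FileSystem.get(
        URI.create("hdfs://192.168.253.128:9000"),
        new Configuration(),
        "root"); // connect as this user; this overload also throws InterruptedException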


That's all for this article on HDFS API operations. We hope it helps, and we hope you'll keep supporting 为之网!