Set up an HDFS file system on a CentOS virtual machine and complete the following HDFS API programming experiments.
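All of the programs below assume a running HDFS cluster whose NameNode listens at hdfs://ZhangYao-1:9000 (the hostname of the virtual machine used throughout these experiments; substitute your own), and that the Hadoop client libraries are on the classpath when compiling and running.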
Experiment 1: create a directory on HDFS with FileSystem.mkdirs().

package my.hdfs;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class MakeDir {
    public static void main(String[] args) throws IOException, URISyntaxException {
        Configuration conf = new Configuration();
        String hdfsPath = "hdfs://ZhangYao-1:9000";
        FileSystem hdfs = FileSystem.get(new URI(hdfsPath), conf);

        // mkdirs also creates missing parent directories, like mkdir -p.
        String newDir = "/hdfstest";
        boolean result = hdfs.mkdirs(new Path(newDir));
        if (result) {
            System.out.println("Success!");
        } else {
            System.out.println("Failed!");
        }
        hdfs.close();
    }
}
Experiment 2: create an empty file with FileSystem.create().

package my.hdfs;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CreateFile {
    public static void main(String[] args) throws IOException, URISyntaxException {
        Configuration configuration = new Configuration();
        String hdfsPath = "hdfs://ZhangYao-1:9000";
        FileSystem fs = FileSystem.get(new URI(hdfsPath), configuration);

        // create() opens an output stream; closing it right away leaves an empty file.
        String filePath = "/hdfstest/testfile";
        FSDataOutputStream os = fs.create(new Path(filePath));
        os.close();
        fs.close();
        System.out.println("Finish!");
    }
}
Experiment 3: delete a file after checking that it exists.

package my.hdfs;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class DeleteFile {
    public static void main(String[] args) throws IOException, URISyntaxException {
        Configuration conf = new Configuration();
        String hdfsPath = "hdfs://ZhangYao-1:9000";
        FileSystem fs = FileSystem.get(new URI(hdfsPath), conf);

        String filePath = "/hdfstest/testfile";
        if (fs.exists(new Path(filePath))) {
            // The single-argument delete(Path) is deprecated; the boolean
            // requests recursive deletion and must be false for a plain file.
            fs.delete(new Path(filePath), false);
            System.out.println("Delete File Successfully!");
        } else {
            System.out.println("File Not Found!");
        }
        fs.close();
    }
}
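delete(Path, boolean) also removes directories: passing true for the flag deletes a directory and everything under it, like rm -r. A minimal sketch, assuming a hypothetical /hdfstest/tmpdir directory (not part of the original experiment):

package my.hdfs;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class DeleteDir {
    public static void main(String[] args) throws IOException, URISyntaxException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(new URI("hdfs://ZhangYao-1:9000"), conf);
        // true = recursive: the directory and all of its children are removed.
        fs.delete(new Path("/hdfstest/tmpdir"), true);
        fs.close();
    }
}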
Experiment 4: upload a local file (or directory) to HDFS.

package my.hdfs;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CopyFromLocalFile {
    public static void main(String[] args) throws IOException, URISyntaxException {
        Configuration conf = new Configuration();
        String hdfsPath = "hdfs://ZhangYao-1:9000";
        FileSystem hdfs = FileSystem.get(new URI(hdfsPath), conf);

        // Copies /home/hadoop/sample_data from the local file system into /hdfstest/.
        String filePath = "/home/hadoop/sample_data";
        String toHDFS = "/hdfstest/";
        hdfs.copyFromLocalFile(new Path(filePath), new Path(toHDFS));
        hdfs.close();
        System.out.println("Finish!");
    }
}
Experiment 5: write bytes into a new HDFS file.

package my.hdfs;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class WriteFile {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // Setting fs.defaultFS lets FileSystem.get(conf) resolve the NameNode
        // without passing a URI explicitly.
        conf.set("fs.defaultFS", "hdfs://ZhangYao-1:9000");
        FileSystem fs = FileSystem.get(conf);

        byte[] buff = "Hello World Hello Data! \n".getBytes();
        String newFileName = "/hdfstest/writefile";
        FSDataOutputStream os = fs.create(new Path(newFileName));
        os.write(buff);
        System.out.println("Create:" + newFileName);
        os.close();
        fs.close();
    }
}
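To add data to an existing file instead of overwriting it, FileSystem.append() returns an output stream positioned at the end of the file. A minimal sketch, assuming the /hdfstest/writefile created above and a cluster that permits appends (enabled by default in Hadoop 2 and later):

package my.hdfs;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class AppendFile {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://ZhangYao-1:9000");
        FileSystem fs = FileSystem.get(conf);

        // append() fails if the file does not exist or appends are disabled.
        FSDataOutputStream os = fs.append(new Path("/hdfstest/writefile"));
        os.write("Appended line\n".getBytes());
        os.close();
        fs.close();
    }
}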
Experiment 6: list the entries under a directory with listStatus().

package my.hdfs;

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ListFiles {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // Hostname restored to match the other examples.
        String hdfsPath = "hdfs://ZhangYao-1:9000/";
        FileSystem hdfs = FileSystem.get(URI.create(hdfsPath), conf);

        String watchHDFS = "/hdfstest";
        // listStatus returns the immediate children, similar to hdfs dfs -ls.
        FileStatus[] files = hdfs.listStatus(new Path(watchHDFS));
        for (FileStatus file : files) {
            System.out.println(file.getPermission() + " " + file.getOwner()
                    + " " + file.getGroup() + " " + file.getPath());
        }
        hdfs.close();
    }
}
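listStatus only shows the immediate children of a directory. For a recursive walk, FileSystem.listFiles(path, true) returns a RemoteIterator over every file in the tree. A minimal sketch against the same /hdfstest directory:

package my.hdfs;

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;

public class ListFilesRecursive {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(URI.create("hdfs://ZhangYao-1:9000"), conf);

        // listFiles(path, true) walks the whole tree and yields files only,
        // never directories.
        RemoteIterator<LocatedFileStatus> it = hdfs.listFiles(new Path("/hdfstest"), true);
        while (it.hasNext()) {
            LocatedFileStatus f = it.next();
            System.out.println(f.getPath() + " " + f.getLen() + " bytes");
        }
        hdfs.close();
    }
}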
Experiment 7: read an HDFS file line by line and back it up to a local file.

package my.hdfs;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ReadFile {
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            String hdfsPath = "hdfs://ZhangYao-1:9000";
            FileSystem fs = FileSystem.get(new URI(hdfsPath), conf);

            Path fileName = new Path("/mytext.txt");
            File file = new File("/home/hadoop/backup.txt");
            OutputStream out = new FileOutputStream(file);

            // Wrap the HDFS input stream in a reader and copy line by line,
            // echoing each line to the console.
            FSDataInputStream fdi = fs.open(fileName);
            BufferedReader br = new BufferedReader(new InputStreamReader(fdi));
            String str;
            while ((str = br.readLine()) != null) {
                out.write((str + "\n").getBytes());
                System.out.println(str);
            }
            out.close();
            br.close();
            fs.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
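The manual stream copy above is instructive, but the API can also download a file in one call with FileSystem.copyToLocalFile(). A minimal sketch, assuming the same /mytext.txt and local backup path; note that HDFS writes a .crc checksum file beside the local copy by default:

package my.hdfs;

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CopyToLocalFile {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create("hdfs://ZhangYao-1:9000"), conf);

        // One call replaces the read-loop above: source on HDFS, destination
        // on the local file system.
        fs.copyToLocalFile(new Path("/mytext.txt"), new Path("/home/hadoop/backup.txt"));
        fs.close();
        System.out.println("Finish!");
    }
}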