1. Create a directory
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class MakeDir {
    public static void main(String[] args) throws Exception {
        // Connect to the NameNode as user "root"
        FileSystem fs = FileSystem.get(new URI("hdfs://linux1:9000"),
                new Configuration(), "root");
        Path path = new Path("/user/hadoop/data/20130709");
        // mkdirs also creates any missing parent directories
        fs.mkdirs(path);
        fs.close();
    }
}
2. Delete a directory
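The code for this step is missing from the original post. Below is a minimal sketch in the same style as the other examples, assuming the same linux1:9000 NameNode and reusing the directory created in step 1; the DeleteDir class name is made up for illustration. Passing true as the second argument to delete() removes the directory and everything under it.

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class DeleteDir {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new URI("hdfs://linux1:9000"),
                new Configuration(), "root");
        Path path = new Path("/user/hadoop/data/20130709");
        // true: delete the directory and its contents recursively
        fs.delete(path, true);
        fs.close();
    }
}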
3. Write a file
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class WriteFile {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new URI("hdfs://linux1:9000"),
                new Configuration(), "root");
        Path path = new Path("/user/hadoop/data/write.txt");
        FSDataOutputStream out = fs.create(path);
        out.writeUTF("da jia hao,cai shi zhen de hao!");
        // Close the stream before closing the FileSystem so the data is flushed
        out.close();
        fs.close();
    }
}
4. Read a file
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ReadFile {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new URI("hdfs://linux1:9000"),
                new Configuration(), "root");
        Path path = new Path("/user/hadoop/data/write.txt");

        if (fs.exists(path)) {
            FSDataInputStream is = fs.open(path);
            FileStatus status = fs.getFileStatus(path);
            byte[] buffer = new byte[(int) status.getLen()];
            // Read the whole file starting at offset 0
            is.readFully(0, buffer);
            is.close();
            fs.close();
            // new String(buffer) decodes the bytes; buffer.toString() would only
            // print the array's identity hash. Note that writeUTF in step 3
            // prepends a 2-byte length, so the output starts with those bytes.
            System.out.println(new String(buffer));
        }
    }
}
5. Upload a local file to HDFS
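The upload example is also missing from the original. Here is a minimal sketch using FileSystem.copyFromLocalFile, with an assumed local source path /home/hadoop/word.txt (chosen to match the word.txt deleted in step 6) and the HDFS data directory as the destination; the CopyFile class name is illustrative.

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CopyFile {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new URI("hdfs://linux1:9000"),
                new Configuration(), "root");
        Path src = new Path("/home/hadoop/word.txt"); // local source (assumed)
        Path dst = new Path("/user/hadoop/data/");    // HDFS destination directory
        // Copies the local file into HDFS, keeping the local copy
        fs.copyFromLocalFile(src, dst);
        fs.close();
    }
}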
6. Delete a file
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class DeleteFile {

    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new URI("hdfs://linux1:9000"),
                new Configuration(), "root");
        Path path = new Path("/user/hadoop/data/word.txt");
        // false: do not delete recursively (the path is a single file)
        fs.delete(path, false);
        fs.close();
    }
}
7. List all subdirectories and files under a given directory
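No listing code survives under this heading, so a minimal sketch with FileSystem.listStatus is given below, assuming the same connection settings; the ListFiles class name is illustrative. listStatus returns only the direct children of the directory; for a full recursive walk you would recurse into each child that is a directory.

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ListFiles {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new URI("hdfs://linux1:9000"),
                new Configuration(), "root");
        Path path = new Path("/user/hadoop/data");

        // One FileStatus per direct child of the directory
        FileStatus[] children = fs.listStatus(path);
        for (FileStatus child : children) {
            // isDir() on Hadoop 1.x; isDirectory() on Hadoop 2 and later
            String type = child.isDir() ? "dir " : "file";
            System.out.println(type + "  " + child.getPath());
        }
        fs.close();
    }
}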
8. Find the location of a file in the HDFS cluster

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class FindFile {

    public static void main(String[] args) throws Exception {
        getFileLocal();
    }

    /**
     * Find which DataNodes hold the blocks of a file in the HDFS cluster.
     */
    public static void getFileLocal() throws Exception {
        FileSystem fs = FileSystem.get(new URI("hdfs://linux1:9000"),
                new Configuration(), "root");
        Path path = new Path("/user/hadoop/data/write.txt");

        FileStatus status = fs.getFileStatus(path);
        BlockLocation[] locations = fs.getFileBlockLocations(status, 0, status.getLen());

        // Print the hosts holding each block of the file
        for (int i = 0; i < locations.length; i++) {
            String[] hosts = locations[i].getHosts();
            System.out.println("block_" + i + "_location: " + hosts[0]);
        }
        fs.close();
    }
}
9. List the names of all nodes in the HDFS cluster

package com.hadoop.file;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

public class FindFile {

    public static void main(String[] args) throws Exception {
        getHDFSNode();
    }

    /**
     * Print the name of every DataNode in the HDFS cluster.
     */
    public static void getHDFSNode() throws Exception {
        FileSystem fs = FileSystem.get(new URI("hdfs://linux1:9000"),
                new Configuration(), "root");

        // Cast to DistributedFileSystem to access the DataNode report
        DistributedFileSystem dfs = (DistributedFileSystem) fs;
        DatanodeInfo[] dataNodeStats = dfs.getDataNodeStats();

        for (int i = 0; i < dataNodeStats.length; i++) {
            System.out.println("DataNode_" + i + "_Name: " + dataNodeStats[i].getHostName());
        }
        fs.close();
    }
}