HDFS Java API操作——《大数据技术原理与应用(第3版)》期末复习
HDFS Java API操作
这里使用 JUnit 包的 @Before、@Test 注解进行调试测试
创建目录
fs.mkdirs()
// Step 1: the Hadoop FileSystem handle, initialized once per test in init().
private FileSystem fs = null;

/**
 * Initializes the HDFS client before each test.
 * Connects to the NameNode at hdfs://192.168.206.3:9000 with Hadoop's default
 * Configuration and authenticates as user "root" (consistent with the other
 * examples in this article).
 *
 * Bug fix: the original declared only URISyntaxException, but FileSystem.get
 * also throws IOException (and the user-aware overload InterruptedException),
 * so the method did not compile. The original comment also claimed a "root"
 * login that the two-argument call never performed.
 *
 * @throws URISyntaxException   if the HDFS URI is malformed
 * @throws IOException          if the connection to HDFS fails
 * @throws InterruptedException if the login is interrupted
 */
@Before
public void init() throws URISyntaxException, IOException, InterruptedException {
    /*
     * new URI("hdfs://192.168.206.3:9000") : address of the HDFS NameNode
     * new Configuration()                  : use Hadoop's default settings
     * "root"                               : user to connect as
     */
    Configuration conf = new Configuration();
    fs = FileSystem.get(new URI("hdfs://192.168.206.3:9000"), conf, "root");
}
// Create a nested directory tree on HDFS.
// Bug fix: fs.mkdirs() throws IOException, so the test method must declare it
// (the original had no throws clause and did not compile).
@Test
public void testMkdir() throws IOException {
    // mkdirs() creates all missing parent directories, like `mkdir -p`.
    boolean flag = fs.mkdirs(new Path("/javaAPI/mk/dir1/dir2"));
    System.out.println(flag ? "创建成功" : "创建失败");
}
显示目录列表
// Handle to the HDFS file system; assigned in init().
private FileSystem fs = null;
// Accumulates every path discovered during the recursive listing.
private List<String> hdfsPathsLists;

// Step 2: connect to HDFS as user "root" with the default Hadoop configuration.
@Before
public void init() throws URISyntaxException, IOException, InterruptedException {
    Configuration conf = new Configuration();
    fs = FileSystem.get(new URI("hdfs://192.168.206.3:9000"), conf, "root");
}
// List every file and directory under the HDFS root, then print each path.
@Test
public void getDirList() throws IOException {
    // Fresh list for this run.
    hdfsPathsLists = new ArrayList<String>();
    // Walk the tree starting at "/".
    getHdfspaths(new Path("/"));
    // Print the collected results, one path per line.
    for (String hdfsPath : hdfsPathsLists) {
        System.out.println(hdfsPath);
    }
}
/*
 * Recursively collects the full paths of all files and directories under
 * the given path into hdfsPathsLists (depth-first, parents before children).
 */
private void getHdfspaths(Path path) throws IOException {
    for (FileStatus status : fs.listStatus(path)) {
        // Record this entry's full path.
        hdfsPathsLists.add(status.getPath().toString());
        // Descend into subdirectories.
        if (status.isDirectory()) {
            getHdfspaths(status.getPath());
        }
    }
}
// Release the HDFS connection after each test.
@After
public void close() throws IOException {
fs.close();
}
删除目录
fs.delete()(注意:fs.deleteOnExit() 并不会立即删除,它只是在 FileSystem 关闭时才真正执行删除;若要立即递归删除目录应使用 fs.delete(path, true))
// Step 1: HDFS file-system handle shared by the test methods below.
private FileSystem fs = null;

// Step 2: open a connection to the NameNode as "root" before each test.
@Before
public void init() throws URISyntaxException, IOException, InterruptedException {
    URI nameNode = new URI("hdfs://192.168.206.3:9000");
    fs = FileSystem.get(nameNode, new Configuration(), "root");
}
// Delete a directory tree on HDFS immediately.
// Bug fix: the original used fs.deleteOnExit(), which only *schedules* the
// path for deletion when the FileSystem is closed and merely returns whether
// the path exists — nothing is removed at call time. fs.delete(path, true)
// deletes the directory recursively right away and reports success/failure.
@Test
public void testRMdir() throws IOException {
    boolean flag = fs.delete(new Path("/javaAPI/mk/dir1/dir2"), true);
    System.out.println(flag ? "删除成功" : "删除失败");
}
判断文件存在
fs.exists()
// Step 1: FileSystem handle for the examples below; set up in init().
private FileSystem fs = null;

// Step 2: initialize the HDFS client (default config, user "root").
@Before
public void init() throws URISyntaxException, IOException, InterruptedException {
    Configuration defaults = new Configuration();
    fs = FileSystem.get(new URI("hdfs://192.168.206.3:9000"), defaults, "root");
}
// Check whether /a.txt exists on HDFS and report the result.
@Test
public void testExistsFile() throws IOException {
    Path target = new Path("hdfs://192.168.206.3:9000/a.txt");
    boolean b = fs.exists(target);
    // Prints "true-exists" or "false-not exists", exactly as before.
    System.out.println(b + (b ? "-exists" : "-not exists"));
}
目录和文件判断
fs.isDirectory
fs.isFile
// Step 1: Hadoop FileSystem object, obtained in init().
private FileSystem fs = null;

// Step 2: set up the HDFS connection (user "root", default configuration).
@Before
public void init() throws URISyntaxException, IOException, InterruptedException {
    final String nameNodeUrl = "hdfs://192.168.206.3:9000";
    fs = FileSystem.get(new URI(nameNodeUrl), new Configuration(), "root");
}
// Report whether /a.txt is a directory, a regular file, or neither.
// Improvement: the original called the deprecated fs.isDirectory(Path) and
// fs.isFile(Path), each a separate round trip to the NameNode. Fetching the
// FileStatus once answers both questions with a single RPC; a missing path
// still falls through to "不确定" as before.
@Test
public void testfilesFile() throws IOException {
    Path target = new Path("hdfs://192.168.206.3:9000/a.txt");
    if (fs.exists(target)) {
        FileStatus status = fs.getFileStatus(target);
        if (status.isDirectory()) {
            System.out.println("是目录鸭");
        } else if (status.isFile()) {
            System.out.println("是文件鸭");
        } else {
            System.out.println("不确定"); // e.g. a symlink
        }
    } else {
        System.out.println("不确定"); // path does not exist
    }
}
重命名文件
fs.rename
// Step 1: HDFS handle used by the rename test; created in init().
private FileSystem fs = null;

// Step 2: connect to HDFS with Hadoop defaults, authenticating as "root".
@Before
public void init() throws URISyntaxException, IOException, InterruptedException {
    fs = FileSystem.get(
            new URI("hdfs://192.168.206.3:9000"),
            new Configuration(),
            "root");
}
// Rename /a.txt to /b.txt on HDFS.
// Bug fix: the original ignored rename()'s boolean result and always printed
// success; rename() returns false (without throwing) when the source is
// missing or the destination already exists, so the result must be checked.
@Test
public void testRenameFile() throws IOException {
    String src = "hdfs://192.168.206.3:9000/a.txt";
    String hdfsDst = "hdfs://192.168.206.3:9000/b.txt";
    boolean renamed = fs.rename(new Path(src), new Path(hdfsDst));
    System.out.println(renamed ? "重命名成功" : "重命名失败");
}
上传文件
fs.copyFromLocalFile
// Step 1: FileSystem client for the upload example.
private FileSystem fs = null;

// Step 2: build the HDFS client before each test run.
@Before
public void init() throws URISyntaxException, IOException, InterruptedException {
    URI hdfsUri = new URI("hdfs://192.168.206.3:9000");
    Configuration conf = new Configuration();
    fs = FileSystem.get(hdfsUri, conf, "root");
}
// Copy a local file into HDFS (the local copy is left untouched).
@Test
public void testUploadFile() throws IOException {
    Path localSrc = new Path("/simple/local.txt");
    Path hdfsDir = new Path("/javaAPI");
    fs.copyFromLocalFile(localSrc, hdfsDir);
    System.out.println("upload success");
}
文件移动
fs.moveToLocalFile
fs.moveFromLocalFile
// Step 1: shared HDFS handle for the move examples below.
private FileSystem fs = null;

// Step 2: establish the HDFS connection (default configuration, user "root").
@Before
public void init() throws URISyntaxException, IOException, InterruptedException {
    final URI endpoint = new URI("hdfs://192.168.206.3:9000");
    final String user = "root";
    fs = FileSystem.get(endpoint, new Configuration(), user);
}
/*
 * Move a file FROM HDFS TO the local file system.
 * After this call the file no longer exists on HDFS.
 */
@Test
public void testmovetoLocalFile() throws IOException {
    Path hdfsSrc = new Path("hdfs://192.168.206.3:9000/a.txt");
    Path localDst = new Path("/root/");
    fs.moveToLocalFile(hdfsSrc, localDst);
    System.out.println("成功移动");
}
/*
 * Move a file FROM the local file system TO HDFS.
 * moveFromLocalFile deletes the local copy once the transfer completes,
 * so the file only exists on HDFS afterwards.
 */
@Test
public void testmovetoHdfsFile() throws IOException {
    Path localSrc = new Path("/root/b.txt");
    Path hdfsDst = new Path("hdfs://192.168.206.3:9000/javaAPI");
    fs.moveFromLocalFile(localSrc, hdfsDst);
    System.out.println("成功移动");
}
文件下载
fs.copyToLocalFile
// Step 1: HDFS client used by the download test.
private FileSystem fs = null;

// Step 2: initialize the connection to the NameNode as user "root".
@Before
public void init() throws URISyntaxException, IOException, InterruptedException {
    Configuration hadoopConf = new Configuration();
    URI nameNode = new URI("hdfs://192.168.206.3:9000");
    fs = FileSystem.get(nameNode, hadoopConf, "root");
}
/*
 * Copy a file from HDFS down to the local file system
 * (the HDFS copy is kept).
 */
@Test
public void testDownloadFile() throws IOException {
    Path hdfsSrc = new Path("/javaAPI/mk/dir1/a.txt");
    Path localDir = new Path("/root/");
    fs.copyToLocalFile(hdfsSrc, localDir);
    System.out.println("下载成功");
}
本图文内容来源于网友网络收集整理提供,作为学习参考使用,版权属于原作者。
THE END
二维码