HDFS 基本操作(Java API)

上传文件到HDFS:(对HDFS的操作,注意config的配置)

import java.net.URI;

import org.apache.hadoop.conf.Configuration;

import org.apache.hadoop.fs.FileSystem;

import org.apache.hadoop.fs.Path;

public class trytry {

    public static void main(String[] args) {

        FileSystem hdfs = null;

        try {

            //目标文件习,在HDFS根目录下新建一个文件夹/hadoop/temp

            String destfile = "/hadoop/tmp/" + System.currentTimeMillis();

            //源文件

            String fsrc = "/usr/hahahah.txt";

            Configuration config = new Configuration();

            // 程序配置

            config.set("fs.default.name", "hdfs://192.168.146.130:9000");

            hdfs = FileSystem

                    .get(new URI("hdfs://192.168.146.130:9000"), config, "hadoop");

            //hadoop是用户名

            Path srcPath = new Path(fsrc);

            Path destPath = new Path(destfile);

            boolean delSrc = true;

            hdfs.copyFromLocalFile(delSrc, srcPath, destPath);

            System.out.println("It is over : " + destfile);

        } catch (Exception e) {

            e.printStackTrace();

        } finally {

            if (hdfs != null) {

                try {

                    hdfs.closeAll();

                } catch (Exception e) {

                    e.printStackTrace();

                }

            }

        }

    }

}

HDFS之中创建文件

import java.net.URI;

import org.apache.hadoop.conf.Configuration;

import org.apache.hadoop.fs.FSDataOutputStream;

import org.apache.hadoop.fs.FileSystem;

import org.apache.hadoop.fs.Path;

public class CreatFile {

public static void main(String args[]) throws Exception{

Configuration cof=new Configuration();

byte[] buff="hello word!".getBytes();

FileSystem hdfs=null;

  cof.set("fs.default.name", "hdfs://192.168.146.130:9000");

         hdfs = FileSystem

                  .get(new URI("hdfs://192.168.146.130:9000"), cof, "hadoop");

Path dfs=new Path("/lilan1");

FSDataOutputStream outputStream=hdfs.create(dfs);

outputStream.write(buff, 0, buff.length);

System.out.println("cdhngegb");

}

}

重命名HDFS文件的文件名:

import java.net.URI;

 

import org.apache.hadoop.conf.Configuration;

import org.apache.hadoop.fs.FileSystem;

import org.apache.hadoop.fs.Path;

public class rename {

public static void main(String args[]) throws Exception{

Configuration cof=new Configuration();

FileSystem hdfs=null;

  cof.set("fs.default.name", "hdfs://192.168.146.130:9000");

          hdfs = FileSystem

                  .get(new URI("hdfs://192.168.146.130:9000"), cof, "hadoop1");

Path sname=new Path("/lilan1");

Path dname=new Path("/lilan2");

hdfs.rename(sname, dname);

}

}

查看HDFS中的文件的最后修改时间:

import java.net.URI;

import org.apache.hadoop.conf.Configuration;

import org.apache.hadoop.fs.FileStatus;

import org.apache.hadoop.fs.FileSystem;

import org.apache.hadoop.fs.Path;

public class finalEditTime {

public static void main(String args[]) throws Exception{

Configuration cof=new Configuration();

FileSystem hdfs=null;

  cof.set("fs.default.name", "hdfs://192.168.146.130:9000");

          hdfs = FileSystem

                  .get(new URI("hdfs://192.168.146.130:9000"), cof, "hadoop1");

Path pa=new Path("/lilan2");

FileStatus filestatus=hdfs.getFileStatus(pa);

long time=filestatus.getModificationTime();

System.out.println("The last Edit time is:"+time);

}

}


  • 0
    点赞
  • 1
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值