Hadoop HDFS API Operations

  • HDFS API operations

1: HDFS API operations

1.1 Reading a file through java.net.URL:


package it.xuegod.hadoop.hdfs;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.junit.Test;

public class TestHdfs {

    // Register the hdfs:// protocol handler with java.net.URL.
    // A static initializer is used because the JVM allows
    // setURLStreamHandlerFactory to be called only once per process.
    static {
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
    }

    @Test
    public void readFile() throws IOException {
        URL url = new URL("hdfs://172.17.100.11:8020/input/file1");
        URLConnection conn = url.openConnection();
        InputStream is = conn.getInputStream();

        // is.available() is unreliable on remote streams, so copy
        // the stream to a buffer until EOF instead.
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        byte[] buf = new byte[1024];
        int len;
        while ((len = is.read(buf)) != -1) {
            baos.write(buf, 0, len);
        }
        is.close();
        System.out.println(new String(baos.toByteArray()));
    }

}

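Note: URL.setURLStreamHandlerFactory can be called only once per JVM, so this approach fails if another component has already registered a factory; the FileSystem API shown in 1.2 avoids that restriction.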

1.2 Reading HDFS data with the FileSystem API

package it.xuegod.hadoop.hdfs;

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Test;

/**
 * @author zhangyy
 *
 * Read a file through the HDFS FileSystem API.
 */
public class readFileApi {

    @Test
    public void readFileByApi() throws IOException {

        // Point the client at the NameNode
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://172.17.100.11:8020");
        FileSystem fs = FileSystem.get(conf);

        Path p = new Path("/input/file1");
        FSDataInputStream fis = fs.open(p);
        byte[] buf = new byte[1024];
        int len = -1;

        // Copy the stream into memory until EOF
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        while ((len = fis.read(buf)) != -1) {
            baos.write(buf, 0, len);
        }
        fis.close();
        baos.close();

        System.out.println(new String(baos.toByteArray()));
    }

}

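The same read can be written more compactly with Hadoop's IOUtils helper, which replaces the manual copy loop: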

package it.xuegod.hadoop.hdfs;

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;

/**
 * @author zhangyy
 *
 * Read a file with IOUtils.copyBytes.
 */
public class readFileApi2 {

    @Test
    public void readFileByApi() throws IOException {

        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://172.17.100.11:8020");
        FileSystem fs = FileSystem.get(conf);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        Path p = new Path("/input/file1");
        FSDataInputStream fis = fs.open(p);
        // Copy the whole stream with a 1 KB buffer
        IOUtils.copyBytes(fis, baos, 1024);
        fis.close();
        System.out.println(new String(baos.toByteArray()));
    }

}

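IOUtils.copyBytes also has a four-argument overload, copyBytes(in, out, bufSize, close); passing true as the last argument closes both streams after the copy, so the explicit close() call can be dropped.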

1.3 Creating a directory on HDFS

package it.xuegod.hadoop.hdfs;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Test;

/**
 * @author zhangyy
 *
 * Create a directory on HDFS.
 */
public class mkdirFileApi {

    @Test
    public void mkdirFileByApi() throws IOException {

        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://172.17.100.11:8020");
        FileSystem fs = FileSystem.get(conf);
        // Creates the directory and any missing parents
        fs.mkdirs(new Path("/myhadoop"));

    }

}

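As a quick sanity check, a minimal sketch along these lines (class name hypothetical, same NameNode address as above) verifies the result with FileSystem.exists():

package it.xuegod.hadoop.hdfs;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Test;

public class mkdirCheckApi {

    @Test
    public void mkdirIfAbsent() throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://172.17.100.11:8020");
        FileSystem fs = FileSystem.get(conf);
        Path dir = new Path("/myhadoop");
        // mkdirs() is harmless if the directory already exists,
        // but an explicit exists() check makes the intent clear
        if (!fs.exists(dir)) {
            fs.mkdirs(dir);
        }
        System.out.println(fs.exists(dir)); // expect: true
    }

}
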
1.4 Writing a file (put)

package it.xuegod.hadoop.hdfs;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Test;

/**
 * @author zhangyy
 *
 * Write a file to HDFS.
 */
public class putFileApi {

    @Test
    public void putFileByApi() throws IOException {

        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://172.17.100.11:8020");
        FileSystem fs = FileSystem.get(conf);
        // create() overwrites an existing file by default
        FSDataOutputStream out = fs.create(new Path("/myhadoop/a.txt"));
        out.write("helloword".getBytes());
        out.close();
    }

}

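To confirm the write, a minimal sketch (class name hypothetical) reads /myhadoop/a.txt back with the same FileSystem API used in 1.2:

package it.xuegod.hadoop.hdfs;

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;

public class verifyPutApi {

    @Test
    public void readBack() throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://172.17.100.11:8020");
        FileSystem fs = FileSystem.get(conf);
        FSDataInputStream fis = fs.open(new Path("/myhadoop/a.txt"));
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        // Fourth argument true closes both streams after the copy
        IOUtils.copyBytes(fis, baos, 1024, true);
        System.out.println(new String(baos.toByteArray())); // expect: helloword
    }

}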

1.5 Deleting files and directories

package it.xuegod.hadoop.hdfs;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Test;

/**
 * @author zhangyy
 *
 * Delete a path on HDFS.
 */
public class rmFileApi {

    @Test
    public void rmFileByApi() throws IOException {

        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://172.17.100.11:8020");
        FileSystem fs = FileSystem.get(conf);
        Path p = new Path("/myhadoop");
        // Second argument true deletes the directory recursively
        fs.delete(p, true);
    }

}

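The boolean second argument to delete() requests recursive deletion: with false, deleting a non-empty directory throws an IOException, and the call returns false if the path does not exist. Here it removes /myhadoop together with the a.txt file written in 1.4.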

Reposted from: https://blog.51cto.com/flyfish225/2096394
