设为首页 加入收藏

TOP

hadoop批量上传和下载文件
2019-02-12 12:36:40 】 浏览:29
Tags:hadoop 批量 上传 下载 文件
package com.test;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import junit.framework.TestCase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class FileSystemTest extends TestCase {

	/**
	 * Smoke test for recursive HDFS transfer: downloads the HDFS tree at
	 * {@code /test} into the local directory {@code load}, then uploads the
	 * local directory {@code D:\netty-master} to {@code /upload} on HDFS.
	 *
	 * <p>Connects to the NameNode at hdfs://192.168.110.201:9000 as user
	 * "root". All failures are reported via stack trace; the FileSystem
	 * handle is always released in {@code finally}.
	 */
	public void test() {
		FileSystem fileSystem = null;
		try {
			fileSystem = FileSystem.get(new URI(
					"hdfs://192.168.110.201:9000"), new Configuration(),
					"root");
			// Both transfers stay inside this try: in the original code an
			// InterruptedException left fileSystem null and the subsequent
			// loadFile call threw a NullPointerException.
			loadFile(fileSystem, new Path("/test"), new File("load"));
			upLoadFile(fileSystem, new File("D:\\netty-master"), new Path("/upload"));
		} catch (InterruptedException e) {
			// Restore the interrupt status rather than swallowing it.
			Thread.currentThread().interrupt();
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		} catch (URISyntaxException e) {
			e.printStackTrace();
		} finally {
			// Release the handle even when a transfer fails.
			if (fileSystem != null) {
				try {
					fileSystem.close();
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
		}
	}

	/**
	 * Recursively downloads an HDFS file or directory to the local disk.
	 *
	 * <p>The source entry is recreated <em>inside</em> {@code dstnFile}, i.e.
	 * the result lands at {@code dstnFile/<srcPath-name>}. Directories are
	 * mirrored by recursing over {@link FileSystem#listStatus}; regular files
	 * are copied with a 4 KiB buffer.
	 *
	 * @param fileSystem open HDFS handle to read from
	 * @param srcPath    HDFS file or directory to download
	 * @param dstnFile   local parent directory to download into
	 * @throws IOException if the HDFS read or local write fails
	 */
	private void loadFile(FileSystem fileSystem, Path srcPath, File dstnFile)
			throws IllegalArgumentException, IOException {
		FileStatus fileStatus = fileSystem.getFileStatus(srcPath);
		String newPath = dstnFile.getPath() + File.separator + fileStatus.getPath().getName();
		if (fileStatus.isDirectory()) {
			dstnFile = new File(newPath);
			if (!dstnFile.exists()) {
				dstnFile.mkdirs();
			}
			FileStatus[] contents = fileSystem.listStatus(srcPath);
			for (int i = 0; i < contents.length; i++) {
				loadFile(fileSystem, contents[i].getPath(), dstnFile);
			}
		} else {
			FSDataInputStream inputStream = null;
			FileOutputStream fileOutputStream = null;
			try {
				inputStream = fileSystem.open(srcPath);
				fileOutputStream = new FileOutputStream(newPath);
				byte[] buffer = new byte[4096];
				int len;
				// -1 is the contractual EOF sentinel for InputStream.read.
				while ((len = inputStream.read(buffer)) != -1) {
					fileOutputStream.write(buffer, 0, len);
				}
			} finally {
				if (fileOutputStream != null) {
					fileOutputStream.close();
				}
				if (inputStream != null) {
					inputStream.close();
				}
			}
		}
	}

	/**
	 * Recursively uploads a local file or directory to HDFS.
	 *
	 * <p>Mirror image of {@link #loadFile}: the local entry is recreated
	 * inside {@code dstnPath} (at {@code dstnPath/<srcFile-name>}).
	 * Existing HDFS files are overwritten ({@code create(path, true)}).
	 *
	 * @param fileSystem open HDFS handle to write to
	 * @param srcFile    local file or directory to upload
	 * @param dstnPath   HDFS parent directory to upload into
	 * @throws IOException if the local read or HDFS write fails
	 */
	private void upLoadFile(FileSystem fileSystem, File srcFile, Path dstnPath)
			throws IllegalArgumentException, IOException {
		String newPath = dstnPath.toString() + File.separator + srcFile.getName();
		dstnPath = new Path(newPath);
		if (srcFile.isDirectory()) {
			if (!fileSystem.exists(dstnPath)) {
				fileSystem.mkdirs(dstnPath);
			}
			File[] listFiles = srcFile.listFiles();
			for (int i = 0; i < listFiles.length; i++) {
				upLoadFile(fileSystem, listFiles[i], dstnPath);
			}
		} else {
			FSDataOutputStream outputStream = null;
			// Renamed from the original's misleading "fileOutputStream":
			// this stream READS the local source file.
			FileInputStream fileInputStream = null;
			try {
				outputStream = fileSystem.create(dstnPath, true);
				fileInputStream = new FileInputStream(srcFile);
				byte[] buffer = new byte[4096];
				int len;
				while ((len = fileInputStream.read(buffer)) != -1) {
					outputStream.write(buffer, 0, len);
				}
			} finally {
				if (fileInputStream != null) {
					fileInputStream.close();
				}
				if (outputStream != null) {
					outputStream.close();
				}
			}
		}
	}
}


编程开发网
】【打印繁体】【投稿】【收藏】 【推荐】【举报】【评论】 【关闭】 【返回顶部
上一篇hadoop 无法启动namenode (两个.. 下一篇在Ubuntu中安装Hadoop(本地单机..

评论

帐  号: 密码: (新用户注册)
验 证 码:
表  情:
内  容:

array(4) { ["type"]=> int(8) ["message"]=> string(24) "Undefined variable: jobs" ["file"]=> string(32) "/mnt/wp/cppentry/do/bencandy.php" ["line"]=> int(214) }