

HDFS API Examples (1): Uploading, Creating, Deleting, and Renaming Files, Locating Blocks, and Listing DataNodes

package cn.framelife.hadoop;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Upload a local file to HDFS and list the destination directory.
 */
public class CopyFile {
	public static void main(String[] args) {
		
		Configuration configuration = new Configuration();
		
		//Add the cluster configuration files so the client can locate HDFS (e.g. the NameNode address)
		configuration.addResource(new Path("core-site.xml"));
		configuration.addResource(new Path("hdfs-site.xml"));
		
		try {
			//Get a FileSystem handle for the HDFS defined in the configuration
			FileSystem hdfs = FileSystem.get(configuration);
			
			//Local source file
			Path src = new Path("/home/benben/abc");
			
			//Destination path on HDFS
			Path dst = new Path("/user/");
			
			hdfs.copyFromLocalFile(src, dst);
			System.out.println("upload to"+configuration.get("fs.default.name"));
			
			//List the files under the HDFS destination directory
			FileStatus[] files = hdfs.listStatus(dst);
			for (FileStatus file : files) {
				System.out.println(file.getPath());
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
		
	}
}
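
The reverse direction, copying a file from HDFS back to the local filesystem, goes through FileSystem.copyToLocalFile. Below is a minimal sketch under the same configuration setup; the DownloadFile class name and both paths are illustrative assumptions.

package cn.framelife.hadoop;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Download a file from HDFS to the local filesystem (sketch).
 */
public class DownloadFile {
	public static void main(String[] args) {
		Configuration configuration = new Configuration();
		configuration.addResource(new Path("core-site.xml"));
		configuration.addResource(new Path("hdfs-site.xml"));

		try {
			FileSystem hdfs = FileSystem.get(configuration);

			//Source path on HDFS (assumed to exist)
			Path src = new Path("/user/abc");

			//Local destination directory (illustrative)
			Path dst = new Path("/home/benben/download");

			hdfs.copyToLocalFile(src, dst);
			System.out.println("downloaded to " + dst);
		} catch (IOException e) {
			e.printStackTrace();
		}
	}
}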

package cn.framelife.hadoop;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Create a file on HDFS and write a short string to it.
 */
public class CreateFile {

	public static void main(String[] args) {
		Configuration configuration = new Configuration();
		configuration.addResource(new Path("core-site.xml"));
		configuration.addResource(new Path("hdfs-site.xml"));

		try {
			FileSystem hdfs = FileSystem.get(configuration);

			byte[] buffer = "Hello Hadoop".getBytes();

			Path newPath = new Path("/user/hello1.txt");

			FSDataOutputStream outputStream = hdfs.create(newPath);

			outputStream.write(buffer, 0, buffer.length);

			//Close the stream so the data is flushed to HDFS
			outputStream.close();

		} catch (IOException e) {
			e.printStackTrace();
		}
	}
}
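
To confirm the write, the file can be read back with FileSystem.open, which returns an FSDataInputStream. A minimal sketch, assuming /user/hello1.txt was just created by the example above (the ReadFile class name is illustrative):

package cn.framelife.hadoop;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

/**
 * Read a file from HDFS and print its contents to stdout (sketch).
 */
public class ReadFile {
	public static void main(String[] args) {
		Configuration configuration = new Configuration();
		configuration.addResource(new Path("core-site.xml"));
		configuration.addResource(new Path("hdfs-site.xml"));

		try {
			FileSystem hdfs = FileSystem.get(configuration);

			FSDataInputStream inputStream = hdfs.open(new Path("/user/hello1.txt"));

			//Copy the stream to stdout in 4 KB chunks, then close it
			IOUtils.copyBytes(inputStream, System.out, 4096, false);
			inputStream.close();
		} catch (IOException e) {
			e.printStackTrace();
		}
	}
}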

package cn.framelife.hadoop;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Delete a file on HDFS.
 */
public class DeleteFile {

	public static void main(String[] args) {
		Configuration configuration = new Configuration();
		configuration.addResource(new Path("core-site.xml"));
		configuration.addResource(new Path("hdfs-site.xml"));
		
		try {
			FileSystem hdfs = FileSystem.get(configuration);
			
			Path pathOfHDFS = new Path("/user/hello1.txt");
			
			//Check that the file exists before deleting it
			if(hdfs.exists(pathOfHDFS)){
				/*
				 * If the path is a non-empty directory, recursive must be true,
				 * otherwise an IOException is thrown. For a plain file the
				 * flag may be either true or false.
				 */
				hdfs.delete(pathOfHDFS, false);
			}
			
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

}
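
As the comment above notes, deleting a non-empty directory requires recursive = true. A minimal sketch; the /user/mydir path and the DeleteDirectory class name are illustrative assumptions.

package cn.framelife.hadoop;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Delete a directory on HDFS recursively (sketch).
 */
public class DeleteDirectory {
	public static void main(String[] args) {
		Configuration configuration = new Configuration();
		configuration.addResource(new Path("core-site.xml"));
		configuration.addResource(new Path("hdfs-site.xml"));

		try {
			FileSystem hdfs = FileSystem.get(configuration);

			Path dir = new Path("/user/mydir");

			if (hdfs.exists(dir)) {
				//recursive = true deletes the directory and everything under it
				boolean deleted = hdfs.delete(dir, true);
				System.out.println(deleted);
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
	}
}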

package cn.framelife.hadoop;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class Rename {

	/**
	 * Rename a file on HDFS.
	 */
	public static void main(String[] args) {
		Configuration configuration = new Configuration();
		configuration.addResource(new Path("core-site.xml"));
		configuration.addResource(new Path("hdfs-site.xml"));
		
		try {
			FileSystem hdfs = FileSystem.get(configuration);
			
			Path pathOfHDFS = new Path("/user/abc.txt");
			Path newPathName = new Path("/user/abcd.txt");
			
			//Returns true on success, false otherwise (e.g. if the destination already exists)
			boolean isRename = hdfs.rename(pathOfHDFS, newPathName);
			
			System.out.println(isRename);
			
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

}

package cn.framelife.hadoop;

import java.io.IOException;
import java.util.Arrays;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Find which DataNodes hold the blocks of a file on HDFS.
 */
public class WhereIsFile {

	public static void main(String[] args) {
		Configuration configuration = new Configuration();
		configuration.addResource(new Path("core-site.xml"));
		configuration.addResource(new Path("hdfs-site.xml"));
		
		try {
			FileSystem hdfs = FileSystem.get(configuration);
			
			Path pathOfHDFS = new Path("/user/hello.txt");
			
			FileStatus fileStatus = hdfs.getFileStatus(pathOfHDFS);

			//Query the block locations over the whole length of the file
			BlockLocation[] blockLocations = hdfs.getFileBlockLocations(fileStatus, 0, fileStatus.getLen());
			for (BlockLocation block : blockLocations) {
				System.out.println(Arrays.toString(block.getHosts()) + "\t" + Arrays.toString(block.getNames()));
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

}

package cn.framelife.hadoop;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class GetLastTime {

	/**
	 * Get the last modification time of a file on HDFS.
	 */
	public static void main(String[] args) {
		Configuration configuration = new Configuration();
		configuration.addResource(new Path("core-site.xml"));
		configuration.addResource(new Path("hdfs-site.xml"));
		
		try {
			FileSystem hdfs = FileSystem.get(configuration);
			
			Path pathOfHDFS = new Path("/user/abcd.txt");
			
			FileStatus file = hdfs.getFileStatus(pathOfHDFS);
			//Milliseconds since the Unix epoch
			long time = file.getModificationTime();
			
			System.out.println(time);
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

}
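
getModificationTime returns milliseconds since the Unix epoch, so the raw number printed above is hard to read. A minimal sketch that formats it with java.text.SimpleDateFormat (the GetLastTimeFormatted class name is illustrative):

package cn.framelife.hadoop;

import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Print the last modification time of an HDFS file in a readable format (sketch).
 */
public class GetLastTimeFormatted {
	public static void main(String[] args) {
		Configuration configuration = new Configuration();
		configuration.addResource(new Path("core-site.xml"));
		configuration.addResource(new Path("hdfs-site.xml"));

		try {
			FileSystem hdfs = FileSystem.get(configuration);

			FileStatus file = hdfs.getFileStatus(new Path("/user/abcd.txt"));

			//Epoch milliseconds, formatted as a local date and time
			long time = file.getModificationTime();
			SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
			System.out.println(format.format(new Date(time)));
		} catch (IOException e) {
			e.printStackTrace();
		}
	}
}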

package cn.framelife.hadoop;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

/**
 * List the host names of all DataNodes in the HDFS cluster.
 */
public class GetDataNodeName {
	public static void main(String[] args) {

		Configuration configuration = new Configuration();
		configuration.addResource(new Path("core-site.xml"));
		configuration.addResource(new Path("hdfs-site.xml"));

		DistributedFileSystem hdfs;
		try {
			//The cast requires the configuration to point at a real HDFS cluster,
			//otherwise it fails with a ClassCastException
			hdfs = (DistributedFileSystem) FileSystem.get(configuration);
			DatanodeInfo[] dataNodeStats = hdfs.getDataNodeStats();

			for (DatanodeInfo dataNode : dataNodeStats) {
				System.out.println(dataNode.getHostName() + "\t" + dataNode.getName());
			}

		} catch (IOException e) {
			e.printStackTrace();
		}

	}
}

