Simple Usage of the HDFS API (2)
package cn.framelife.hadoop;

import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

/**
 * Reading data with the FileSystem API.
 * @author 潘廣偉
 */
public class FileSystemCat {

    public static Configuration getConf() {
        Configuration configuration = new Configuration();
        // Load the cluster settings from the client-side config files.
        configuration.addResource(new Path("core-site.xml"));
        configuration.addResource(new Path("hdfs-site.xml"));
        return configuration;
    }

    public static void main(String[] args) {
        InputStream in = null;
        String url = "hdfs://namenode:9000/user/hadoop/hello1.txt";
        try {
            FileSystem fs = FileSystem.get(getConf());
            in = fs.open(new Path(url));
            // Copy the stream to stdout with a fixed 4 KB buffer.
            // (Do not pass in.available() as the buffer size: it only reports
            // how many bytes can be read without blocking, not the file length.)
            IOUtils.copyBytes(in, System.out, 4096, false);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            IOUtils.closeStream(in);
        }
    }
}
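One advantage of going through FileSystem rather than a plain java.net.URL stream is that fs.open() actually returns an FSDataInputStream, which is seekable. Below is a minimal sketch of that capability; the class name FileSystemDoubleCat is hypothetical, and it assumes the same hello1.txt path and the getConf() helper from FileSystemCat above. It prints the file twice by seeking back to the start:

package cn.framelife.hadoop;

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class FileSystemDoubleCat {
    public static void main(String[] args) throws Exception {
        // Hypothetical example; reuses getConf() from FileSystemCat above.
        String url = "hdfs://namenode:9000/user/hadoop/hello1.txt";
        FileSystem fs = FileSystem.get(FileSystemCat.getConf());
        FSDataInputStream in = null;
        try {
            in = fs.open(new Path(url));
            IOUtils.copyBytes(in, System.out, 4096, false);
            in.seek(0); // jump back to the start of the file
            IOUtils.copyBytes(in, System.out, 4096, false);
        } finally {
            IOUtils.closeStream(in);
        }
    }
}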
package cn.framelife.hadoop;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.io.IOUtils;

/**
 * Reading data through a Hadoop URL.
 * @author 潘廣偉
 */
public class URLCat {

    static {
        // Teach java.net.URL to recognize the hdfs:// scheme.
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
    }

    public static void main(String[] args) {
        InputStream in = null;
        String url = "hdfs://namenode:9000/user/hadoop/hello1.txt";
        try {
            in = new URL(url).openStream();
            // Read the stream to the end. (Reading in.available() bytes in a
            // single read(), as the original did, is unreliable: available()
            // only reports bytes readable without blocking, so it may return
            // less than the full file.)
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            IOUtils.copyBytes(in, out, 4096, false);
            String msg = new String(out.toByteArray());
            System.out.println("Received message: " + msg);
            // Alternatively, copy the stream straight to stdout:
            // IOUtils.copyBytes(in, System.out, 4096, false);
        } catch (MalformedURLException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            IOUtils.closeStream(in);
        }
    }
}
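A note on the design choice between the two examples: URL.setURLStreamHandlerFactory can be called at most once per JVM, so if any other library in the same process has already installed a stream handler factory, URLCat's static initializer will fail. The FileSystem-based approach in FileSystemCat has no such restriction, which is why it is generally the preferred way to read data from HDFS.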
Last updated: 2017-04-03 16:59:46