小编给大家分享一下HDFS常用的Java API操作示例有哪些,相信大部分人都还不怎么了解,因此分享这篇文章给大家参考一下,希望大家阅读完这篇文章后大有收获,下面让我们一起去了解一下吧!
一:查看HDFS文件的最后修改时间
/**
 * Prints the last modification time of an HDFS path.
 *
 * <p>Connects to the NameNode at {@code hdfs://192.168.226.129:9000}, fetches the
 * {@link FileStatus} of the filesystem root, and prints its modification time
 * (milliseconds since the epoch, as reported by the NameNode).
 */
public class Test6GetLTime {

    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            URI uri = new URI("hdfs://192.168.226.129:9000");
            FileSystem fs = FileSystem.get(uri, conf);
            Path dfs = new Path("hdfs://192.168.226.129:9000/");
            FileStatus fileStatus = fs.getFileStatus(dfs);
            long modificationTime = fileStatus.getModificationTime();
            // Fixed typo in the original output ("Modefication" -> "Modification").
            System.out.println("Modification time is: " + modificationTime);
        } catch (IllegalArgumentException | URISyntaxException | IOException e) {
            // Demo code: surface the failure on stderr instead of swallowing it.
            e.printStackTrace();
        }
    }
}
二:查找某个文件在HDFS集群的位置
/**
 * Locates the blocks of an HDFS file and prints the DataNode host(s) holding
 * each block.
 *
 * <p>Fetches the {@link BlockLocation} array for the whole length of
 * {@code /rootdir/ssh.txt} and prints, for every block, the hosts that store a
 * replica of it.
 */
public class Test7FileLocation {

    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            URI uri = new URI("hdfs://192.168.226.129:9000");
            FileSystem fs = FileSystem.get(uri, conf);
            Path dfs = new Path("hdfs://192.168.226.129:9000/rootdir/ssh.txt");
            FileStatus fileStatus = fs.getFileStatus(dfs);
            BlockLocation[] blkLocations =
                    fs.getFileBlockLocations(fileStatus, 0, fileStatus.getLen());
            int blockLen = blkLocations.length;
            System.out.println("blockLen of length : " + blockLen);
            for (int i = 0; i < blockLen; i++) {
                String[] hosts = blkLocations[i].getHosts();
                // BUG FIX: the original printed hosts[i], indexing the replica-host
                // array of block i with the *block* index. That reports the wrong
                // host and throws ArrayIndexOutOfBoundsException as soon as the
                // block count exceeds the replication factor. Print every replica
                // host of this block instead.
                System.out.println("Block " + i + " Location: " + String.join(",", hosts));
            }
        } catch (IllegalArgumentException | URISyntaxException | IOException e) {
            e.printStackTrace();
        }
    }
}
三: 获取HDFS集群上所有节点名称
/**
 * Lists the hostname of every DataNode registered with the HDFS cluster.
 *
 * <p>Casts the generic {@link FileSystem} handle to {@link DistributedFileSystem}
 * to reach the HDFS-specific {@code getDataNodeStats()} API, then prints one
 * "Node &lt;index&gt; Name: &lt;hostname&gt;" line per DataNode.
 */
public class Test8GetList {

    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(new URI("hdfs://192.168.226.129:9000"), conf);
            // getDataNodeStats() only exists on the HDFS implementation.
            DistributedFileSystem hdfs = (DistributedFileSystem) fs;
            DatanodeInfo[] nodes = hdfs.getDataNodeStats();
            String[] names = new String[nodes.length];
            for (int idx = 0; idx < nodes.length; idx++) {
                names[idx] = nodes[idx].getHostName();
                System.out.println("Node " + idx + " Name: " + names[idx]);
            }
        } catch (URISyntaxException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
四: 上传一个视频文件至HDFS(非实时视频流)
public class UploadLive { public static void main(String[] args) { try { Configuration conf = new Configuration(); URI uri = new URI("hdfs://192.168.226.129:9000"); FileSystem fs = FileSystem.get(uri, conf); FileSystem local = FileSystem.getLocal(conf); //确定需要上传视频流路径和接收视频流路径 Path inputDir = new Path("F:\\AHadoopTestFile"); Path hdfsFile = new Path("hdfs://192.168.226.129:9000/testhadoop/acceptLiveFile"); System.out.println( inputDir.toString()); //创建HDFS上 "acceptLiveFile" 目录 用来接收视频文件 boolean isExist = fs.exists( hdfsFile ); if( !isExist ){ fs.mkdirs(hdfsFile); System.out.println(" 创建新的目录文件成功..."); } FileStatus[] inputFiles = local.listStatus(inputDir); FSDataOutputStream out; //通过OutputStream.write()来将视频文件写入HDFS下的指定目录: int inputFileslen = inputFiles.length; for( int i=0;i<inputFileslen;i++){ System.out.println( inputFiles[i].getPath().getName() ); FSDataInputStream in = local.open(inputFiles[i].getPath() ); out = fs.create( new Path("hdfs://192.168.226.129:9000/testhadoop/acceptLiveFile/"+inputFiles[i].getPath().getName())); byte [] buffer = new byte[1024]; int byteRead = 0; while((byteRead = in.read(buffer))>0 ){ out.write(buffer,0,byteRead); } out.close(); in.close(); File file = new File( inputFiles[i].getPath().toString()); file.delete(); } } catch (IllegalArgumentException e) { e.printStackTrace(); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (URISyntaxException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } } }
以上是“HDFS基本常用命令有哪些”这篇文章的所有内容,感谢各位的阅读!相信大家都有了一定的了解,希望分享的内容对大家有所帮助,如果还想学习更多知识,欢迎关注亿速云行业资讯频道!
免责声明:本站发布的内容(图片、视频和文字)以原创、转载和分享为主,文章观点不代表本网站立场,如果涉及侵权请联系站长邮箱:is@yisu.com进行举报,并提供相关证据,一经查实,将立刻删除涉嫌侵权内容。