
Hadoop: measuring SSD usage in HDFS
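The Kotlin program below uses the Hadoop FileSystem API to walk every file under /hbase. For each block of each file it counts how many replicas are placed on SSD storage (via BlockLocation.getStorageTypes()), multiplies that replica count by the block length, and sums the result per table directory, giving an estimate of the SSD bytes occupied by each HBase table.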

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.fs.Path
import java.net.URI

fun main() {
    val config = Configuration()
    config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem")
    // Connect to the NameNode as the "hdfs" user.
    val fs = FileSystem.get(URI("hdfs://host:8020"), config, "hdfs")

    // Immediate children of /hbase; each child directory is treated as one table.
    val tableDirs = fs.listStatus(Path("/hbase"))
    // tableName -> bytes stored on SSD
    val res = mutableMapOf<String, Long>()
    // Directories to exclude from the scan (empty here).
    val skipDirs = listOf<String>()
    var first = true

    tableDirs.forEach { dir ->
        // Use the final path component as the table name.
        val tableName = dir.path.name
        if (!skipDirs.contains(tableName)) {
            // Recursively list all files under this directory.
            val fileIter = fs.listFiles(Path(dir.path.toUri().path), true)
            while (fileIter.hasNext()) {
                val fileStatus = fileIter.next()
                fileStatus.blockLocations.forEach { blockLocation ->
                    // Number of replicas of this block that sit on SSD storage.
                    val ssdReplicas = blockLocation.storageTypes.count { it.name == "SSD" }
                    res[tableName] = (res[tableName] ?: 0L) + ssdReplicas * blockLocation.length

                    // Print intermediate totals roughly once every 10 seconds.
                    if (System.currentTimeMillis() / 1000 % 10 == 0L) {
                        if (first) {
                            println(res.map { "${it.key}:${it.value}:${it.value / 1024 / 1024 / 1024}GB" })
                            first = false
                        }
                    } else {
                        first = true
                    }
                }
            }
        }
    }

    println("==done=======================================================")
    res.forEach {
        println("${it.key}:${it.value}:${it.value / 1024 / 1024 / 1024} GB")
    }
}
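If you want a breakdown by every storage type (SSD, DISK, ARCHIVE, ...) rather than SSD only, the same block-location data can be aggregated per StorageType. Below is a minimal variant sketch; the helper name storageTypeUsage is hypothetical, and it reuses the FileSystem handle and NameNode address from the program above.

import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.fs.Path

fun storageTypeUsage(fs: FileSystem, root: Path): Map<String, Long> {
    // storage type name -> bytes held on that storage type under root
    val usage = mutableMapOf<String, Long>()
    // Recursively list every file under the root path.
    val iter = fs.listFiles(root, true)
    while (iter.hasNext()) {
        val status = iter.next()
        status.blockLocations.forEach { block ->
            // One storage-type entry per replica of this block.
            block.storageTypes.forEach { type ->
                usage[type.name] = (usage[type.name] ?: 0L) + block.length
            }
        }
    }
    return usage
}

Calling storageTypeUsage(fs, Path("/hbase")) with the FileSystem handle from main() returns a map from storage type name to total bytes, which makes it easy to see what fraction of the data actually landed on SSD.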


Original article: https://blog.csdn.net/m0_65850671/article/details/142356310
