@Private
public class DFSUtilClient
extends java.lang.Object
| Modifier and Type | Class | Description |
|---|---|---|
static class |
DFSUtilClient.CorruptedBlocks |
A utility class as a container to put corrupted blocks, shared by client
and datanode.
|
static interface |
DFSUtilClient.SnapshotDiffReportFunction |
|
static interface |
DFSUtilClient.SnapshotDiffReportListingFunction |
| Modifier and Type | Field | Description |
|---|---|---|
static byte[] |
EMPTY_BYTES |
| Constructor | Description |
|---|---|
DFSUtilClient() |
| Modifier and Type | Method | Description |
|---|---|---|
static byte[] |
byteArray2bytes(byte[][] pathComponents) |
Given a list of path components returns a byte array
|
static java.lang.String |
byteArray2String(byte[][] pathComponents) |
Given a list of path components returns a string.
|
static byte[][] |
bytes2byteArray(byte[] bytes) |
Splits a byte array into an array of arrays of bytes
on the byte separator.
|
static byte[][] |
bytes2byteArray(byte[] bytes,
int len,
byte separator) |
Splits first len bytes in bytes to array of arrays of bytes
on byte separator.
|
static java.lang.String |
bytes2String(byte[] bytes) |
Converts a byte array to a string using UTF8 encoding.
|
static int |
compareBytes(byte[] left,
byte[] right) |
Compare two byte arrays by lexicographical order.
|
static IOStreamPair |
connectToDN(DatanodeInfo dn,
int timeout,
org.apache.hadoop.conf.Configuration conf,
SaslDataTransferClient saslClient,
javax.net.SocketFactory socketFactory,
boolean connectToDnViaHostname,
DataEncryptionKeyFactory dekFactory,
org.apache.hadoop.security.token.Token<BlockTokenIdentifier> blockToken) |
Connect to the given datanode's data transfer port, and return
the resulting IOStreamPair.
|
static ClientDatanodeProtocol |
createClientDatanodeProtocolProxy(java.net.InetSocketAddress addr,
org.apache.hadoop.security.UserGroupInformation ticket,
org.apache.hadoop.conf.Configuration conf,
javax.net.SocketFactory factory) |
Create a
ClientDatanodeProtocol proxy |
static ClientDatanodeProtocol |
createClientDatanodeProtocolProxy(DatanodeID datanodeid,
org.apache.hadoop.conf.Configuration conf,
int socketTimeout,
boolean connectToDnViaHostname) |
Create
ClientDatanodeProtocol proxy using kerberos ticket |
static ClientDatanodeProtocol |
createClientDatanodeProtocolProxy(DatanodeID datanodeid,
org.apache.hadoop.conf.Configuration conf,
int socketTimeout,
boolean connectToDnViaHostname,
LocatedBlock locatedBlock) |
Create a
ClientDatanodeProtocol proxy |
static ReconfigurationProtocol |
createReconfigurationProtocolProxy(java.net.InetSocketAddress addr,
org.apache.hadoop.security.UserGroupInformation ticket,
org.apache.hadoop.conf.Configuration conf,
javax.net.SocketFactory factory) |
|
static java.lang.String |
dateToIso8601String(java.util.Date date) |
Converts a Date into an ISO-8601 formatted datetime string.
|
static java.lang.String |
durationToString(long durationMs) |
Converts a time duration in milliseconds into DDD:HH:MM:SS format.
|
static java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.net.InetSocketAddress>> |
getAddresses(org.apache.hadoop.conf.Configuration conf,
java.lang.String defaultAddress,
java.lang.String... keys) |
Returns the configured address for all NameNodes in the cluster.
|
static java.util.Map<java.lang.String,java.net.InetSocketAddress> |
getAddressesForNameserviceId(org.apache.hadoop.conf.Configuration conf,
java.lang.String nsId,
java.lang.String defaultValue,
java.lang.String... keys) |
|
static java.lang.String |
getConfValue(java.lang.String defaultValue,
java.lang.String keySuffix,
org.apache.hadoop.conf.Configuration conf,
java.lang.String... keys) |
Given a list of keys in the order of preference, returns a value
for the key in the given order from the configuration.
|
static java.lang.String |
getEZTrashRoot(EncryptionZone ez,
org.apache.hadoop.security.UserGroupInformation ugi) |
Returns trash root in encryption zone.
|
static java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.net.InetSocketAddress>> |
getHaNnRpcAddresses(org.apache.hadoop.conf.Configuration conf) |
Returns list of InetSocketAddress corresponding to HA NN RPC addresses from
the configuration.
|
static java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.net.InetSocketAddress>> |
getHaNnWebHdfsAddresses(org.apache.hadoop.conf.Configuration conf,
java.lang.String scheme) |
Returns list of InetSocketAddress corresponding to HA NN HTTP addresses from
the configuration.
|
static java.lang.String |
getHomeDirectory(org.apache.hadoop.conf.Configuration conf,
org.apache.hadoop.security.UserGroupInformation ugi) |
Returns current user home directory under a home directory prefix.
|
static int |
getIoFileBufferSize(org.apache.hadoop.conf.Configuration conf) |
|
static java.util.Collection<java.lang.String> |
getNameNodeIds(org.apache.hadoop.conf.Configuration conf,
java.lang.String nsId) |
Namenode HighAvailability related configuration.
|
static java.util.Collection<java.lang.String> |
getNameServiceIds(org.apache.hadoop.conf.Configuration conf) |
Returns collection of nameservice Ids from the configuration.
|
static java.net.InetSocketAddress |
getNNAddress(java.lang.String address) |
|
static java.net.InetSocketAddress |
getNNAddress(java.net.URI filesystemURI) |
|
static java.net.InetSocketAddress |
getNNAddress(org.apache.hadoop.conf.Configuration conf) |
|
static java.net.InetSocketAddress |
getNNAddressCheckLogical(org.apache.hadoop.conf.Configuration conf,
java.net.URI filesystemURI) |
Get the NN address from the URI.
|
static java.net.URI |
getNNUri(java.net.InetSocketAddress namenode) |
|
static float |
getPercentRemaining(long remaining,
long capacity) |
Return remaining as percentage of capacity
|
static float |
getPercentUsed(long used,
long capacity) |
Return used as percentage of capacity
|
static java.util.Map<java.lang.String,java.net.InetSocketAddress> |
getResolvedAddressesForNnId(org.apache.hadoop.conf.Configuration conf,
java.lang.String nsId,
java.lang.String nnId,
org.apache.hadoop.net.DomainNameResolver dnr,
java.lang.String defaultValue,
java.lang.String... keys) |
|
static int |
getSmallBufferSize(org.apache.hadoop.conf.Configuration conf) |
|
static SnapshotDiffReport |
getSnapshotDiffReport(java.lang.String snapshotDir,
java.lang.String fromSnapshot,
java.lang.String toSnapshot,
DFSUtilClient.SnapshotDiffReportFunction withoutListing,
DFSUtilClient.SnapshotDiffReportListingFunction withListing) |
|
static java.lang.String |
getSnapshotTrashRoot(java.lang.String ssRoot,
org.apache.hadoop.security.UserGroupInformation ugi) |
Returns trash root in a snapshottable directory.
|
static java.util.concurrent.ThreadPoolExecutor |
getThreadPoolExecutor(int corePoolSize,
int maxPoolSize,
long keepAliveTimeSecs,
java.lang.String threadNamePrefix,
boolean runRejectedExec) |
Utility to create a
ThreadPoolExecutor. |
static java.util.concurrent.ThreadPoolExecutor |
getThreadPoolExecutor(int corePoolSize,
int maxPoolSize,
long keepAliveTimeSecs,
java.util.concurrent.BlockingQueue<java.lang.Runnable> queue,
java.lang.String threadNamePrefix,
boolean runRejectedExec) |
Utility to create a
ThreadPoolExecutor. |
static java.lang.String |
getTrashRoot(org.apache.hadoop.conf.Configuration conf,
org.apache.hadoop.security.UserGroupInformation ugi) |
Returns trash root in non-encryption zone.
|
static boolean |
isHDFSEncryptionEnabled(org.apache.hadoop.conf.Configuration conf) |
Probe for HDFS Encryption being enabled; this uses the value of the option
CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH
, returning true if that property contains a non-empty, non-whitespace
string. |
static boolean |
isLocalAddress(java.net.InetSocketAddress targetAddr) |
|
static boolean |
isValidName(java.lang.String src) |
Whether the pathname is valid.
|
static boolean |
isValidSnapshotName(java.lang.String snapshotName) |
Returns true if the name of the snapshot is valid.
|
static org.apache.hadoop.fs.BlockLocation[] |
locatedBlocks2Locations(java.util.List<LocatedBlock> blocks) |
Convert a List to BlockLocation[]
|
static org.apache.hadoop.fs.BlockLocation[] |
locatedBlocks2Locations(LocatedBlocks blocks) |
Convert a LocatedBlocks to BlockLocations[]
|
static org.apache.hadoop.fs.Path |
makePathFromFileId(long fileId) |
Create the internal unique file path from HDFS file ID (inode ID).
|
static Peer |
peerFromSocket(java.net.Socket socket) |
|
static Peer |
peerFromSocketAndKey(SaslDataTransferClient saslClient,
java.net.Socket s,
DataEncryptionKeyFactory keyFactory,
org.apache.hadoop.security.token.Token<BlockTokenIdentifier> blockToken,
DatanodeID datanodeId,
int socketTimeoutMs) |
|
static java.lang.String |
percent2String(double percentage) |
Convert percentage to a string.
|
static byte[] |
string2Bytes(java.lang.String str) |
Converts a string to a byte array using UTF8 encoding.
|
static java.io.InterruptedIOException |
toInterruptedIOException(java.lang.String message,
java.lang.InterruptedException e) |
public static byte[] string2Bytes(java.lang.String str)
public static java.lang.String bytes2String(byte[] bytes)
public static byte[][] bytes2byteArray(byte[] bytes)
public static byte[][] bytes2byteArray(byte[] bytes,
int len,
byte separator)
bytes - the byte array to split
len - the number of bytes to split
separator - the delimiting byte
public static float getPercentUsed(long used,
                                   long capacity)
public static float getPercentRemaining(long remaining,
long capacity)
public static java.lang.String percent2String(double percentage)
public static java.util.Collection<java.lang.String> getNameServiceIds(org.apache.hadoop.conf.Configuration conf)
conf - configuration
public static java.util.Collection<java.lang.String> getNameNodeIds(org.apache.hadoop.conf.Configuration conf,
                                                                    java.lang.String nsId)
conf - configuration
nsId - the nameservice ID to look at, or null for non-federated
public static java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.net.InetSocketAddress>> getHaNnRpcAddresses(org.apache.hadoop.conf.Configuration conf)
conf - configuration
public static java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.net.InetSocketAddress>> getHaNnWebHdfsAddresses(org.apache.hadoop.conf.Configuration conf,
                                                                                                                                 java.lang.String scheme)
public static org.apache.hadoop.fs.BlockLocation[] locatedBlocks2Locations(LocatedBlocks blocks)
blocks - a LocatedBlocks
public static org.apache.hadoop.fs.BlockLocation[] locatedBlocks2Locations(java.util.List<LocatedBlock> blocks)
blocks - A List to be converted
public static int compareBytes(byte[] left,
byte[] right)
public static byte[] byteArray2bytes(byte[][] pathComponents)
public static java.lang.String byteArray2String(byte[][] pathComponents)
public static java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.net.InetSocketAddress>> getAddresses(org.apache.hadoop.conf.Configuration conf,
java.lang.String defaultAddress,
java.lang.String... keys)
conf - configuration
defaultAddress - default address to return in case key is not found.
keys - Set of keys to look for in the order of preference
public static java.util.Map<java.lang.String,java.net.InetSocketAddress> getResolvedAddressesForNnId(org.apache.hadoop.conf.Configuration conf,
java.lang.String nsId,
java.lang.String nnId,
org.apache.hadoop.net.DomainNameResolver dnr,
java.lang.String defaultValue,
java.lang.String... keys)
public static java.util.Map<java.lang.String,java.net.InetSocketAddress> getAddressesForNameserviceId(org.apache.hadoop.conf.Configuration conf,
java.lang.String nsId,
java.lang.String defaultValue,
java.lang.String... keys)
public static java.lang.String getConfValue(java.lang.String defaultValue,
java.lang.String keySuffix,
org.apache.hadoop.conf.Configuration conf,
java.lang.String... keys)
defaultValue - default value to return, when key was not found
keySuffix - suffix to add to the key, if it is not null
conf - Configuration
keys - list of keys in the order of preference
public static boolean isValidName(java.lang.String src)
public static java.lang.String durationToString(long durationMs)
public static java.lang.String dateToIso8601String(java.util.Date date)
public static boolean isLocalAddress(java.net.InetSocketAddress targetAddr)
throws java.io.IOException
java.io.IOException
public static ClientDatanodeProtocol createClientDatanodeProtocolProxy(DatanodeID datanodeid, org.apache.hadoop.conf.Configuration conf, int socketTimeout, boolean connectToDnViaHostname, LocatedBlock locatedBlock) throws java.io.IOException
ClientDatanodeProtocol proxy
java.io.IOException
public static ClientDatanodeProtocol createClientDatanodeProtocolProxy(DatanodeID datanodeid, org.apache.hadoop.conf.Configuration conf, int socketTimeout, boolean connectToDnViaHostname) throws java.io.IOException
ClientDatanodeProtocol proxy using kerberos ticket
java.io.IOException
public static ClientDatanodeProtocol createClientDatanodeProtocolProxy(java.net.InetSocketAddress addr, org.apache.hadoop.security.UserGroupInformation ticket, org.apache.hadoop.conf.Configuration conf, javax.net.SocketFactory factory) throws java.io.IOException
ClientDatanodeProtocol proxy
java.io.IOException
public static ReconfigurationProtocol createReconfigurationProtocolProxy(java.net.InetSocketAddress addr, org.apache.hadoop.security.UserGroupInformation ticket, org.apache.hadoop.conf.Configuration conf, javax.net.SocketFactory factory) throws java.io.IOException
java.io.IOException
public static Peer peerFromSocket(java.net.Socket socket) throws java.io.IOException
java.io.IOException
public static Peer peerFromSocketAndKey(SaslDataTransferClient saslClient, java.net.Socket s, DataEncryptionKeyFactory keyFactory, org.apache.hadoop.security.token.Token<BlockTokenIdentifier> blockToken, DatanodeID datanodeId, int socketTimeoutMs) throws java.io.IOException
java.io.IOException
public static int getIoFileBufferSize(org.apache.hadoop.conf.Configuration conf)
public static int getSmallBufferSize(org.apache.hadoop.conf.Configuration conf)
public static boolean isHDFSEncryptionEnabled(org.apache.hadoop.conf.Configuration conf)
CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH
, returning true if that property contains a non-empty, non-whitespace
string.
conf - configuration to probe
public static java.net.InetSocketAddress getNNAddress(java.lang.String address)
public static java.net.InetSocketAddress getNNAddress(org.apache.hadoop.conf.Configuration conf)
public static java.net.InetSocketAddress getNNAddress(java.net.URI filesystemURI)
public static java.net.InetSocketAddress getNNAddressCheckLogical(org.apache.hadoop.conf.Configuration conf,
java.net.URI filesystemURI)
conf - configuration
filesystemURI - URI of the file system
public static java.net.URI getNNUri(java.net.InetSocketAddress namenode)
public static java.io.InterruptedIOException toInterruptedIOException(java.lang.String message,
java.lang.InterruptedException e)
public static IOStreamPair connectToDN(DatanodeInfo dn, int timeout, org.apache.hadoop.conf.Configuration conf, SaslDataTransferClient saslClient, javax.net.SocketFactory socketFactory, boolean connectToDnViaHostname, DataEncryptionKeyFactory dekFactory, org.apache.hadoop.security.token.Token<BlockTokenIdentifier> blockToken) throws java.io.IOException
java.io.IOException
public static java.util.concurrent.ThreadPoolExecutor getThreadPoolExecutor(int corePoolSize,
int maxPoolSize,
long keepAliveTimeSecs,
java.lang.String threadNamePrefix,
boolean runRejectedExec)
ThreadPoolExecutor.
corePoolSize - min threads in the pool, even if idle
maxPoolSize - max threads in the pool
keepAliveTimeSecs - max seconds beyond which excess idle threads will be terminated
threadNamePrefix - name prefix for the pool threads
runRejectedExec - when true, rejected tasks from ThreadPoolExecutor are run in the context of calling thread
public static java.util.concurrent.ThreadPoolExecutor getThreadPoolExecutor(int corePoolSize,
int maxPoolSize,
long keepAliveTimeSecs,
java.util.concurrent.BlockingQueue<java.lang.Runnable> queue,
java.lang.String threadNamePrefix,
boolean runRejectedExec)
ThreadPoolExecutor.
corePoolSize - min threads in the pool, even if idle
maxPoolSize - max threads in the pool
keepAliveTimeSecs - max seconds beyond which excess idle threads will be terminated
queue - the queue to use for holding tasks before they are executed.
threadNamePrefix - name prefix for the pool threads
runRejectedExec - when true, rejected tasks from ThreadPoolExecutor are run in the context of calling thread
public static org.apache.hadoop.fs.Path makePathFromFileId(long fileId)
fileId - File ID.
public static java.lang.String getHomeDirectory(org.apache.hadoop.conf.Configuration conf,
org.apache.hadoop.security.UserGroupInformation ugi)
HdfsClientConfigKeys.DFS_USER_HOME_DIR_PREFIX_KEY.
User info is obtained from given UserGroupInformation.
conf - configuration
ugi - UserGroupInformation of current user.
public static java.lang.String getTrashRoot(org.apache.hadoop.conf.Configuration conf,
                                            org.apache.hadoop.security.UserGroupInformation ugi)
conf - configuration.
ugi - user of trash owner.
public static java.lang.String getEZTrashRoot(EncryptionZone ez, org.apache.hadoop.security.UserGroupInformation ugi)
ez - encryption zone.
ugi - user of trash owner.
public static java.lang.String getSnapshotTrashRoot(java.lang.String ssRoot,
                                                    org.apache.hadoop.security.UserGroupInformation ugi)
ssRoot - String of path to a snapshottable directory root.
ugi - user of trash owner.
public static boolean isValidSnapshotName(java.lang.String snapshotName)
snapshotName - name of the snapshot.
public static SnapshotDiffReport getSnapshotDiffReport(java.lang.String snapshotDir, java.lang.String fromSnapshot, java.lang.String toSnapshot, DFSUtilClient.SnapshotDiffReportFunction withoutListing, DFSUtilClient.SnapshotDiffReportListingFunction withListing) throws java.io.IOException
java.io.IOException
Copyright © 2008–2025 Apache Software Foundation. All rights reserved.