@Private
public class DFSUtil
extends java.lang.Object
| Modifier and Type | Class | Description |
|---|---|---|
static class |
DFSUtil.ConfiguredNNAddress |
Represent one of the NameNodes configured in the cluster.
|
static class |
DFSUtil.ServiceComparator |
Comparator for sorting DataNodeInfo[] based on
decommissioned and entering_maintenance states.
|
static class |
DFSUtil.StaleAndSlowComparator |
Comparator for sorting DataNodeInfo[] based on
slow, stale, entering_maintenance, decommissioning and decommissioned states.
|
| Modifier and Type | Field | Description |
|---|---|---|
static org.apache.commons.cli.Option |
helpOpt |
|
static org.apache.commons.cli.Options |
helpOptions |
|
static org.slf4j.Logger |
LOG |
| Modifier and Type | Method | Description |
|---|---|---|
static void |
addInternalPBProtocol(org.apache.hadoop.conf.Configuration conf,
java.lang.Class<?> protocol,
org.apache.hadoop.thirdparty.protobuf.BlockingService service,
org.apache.hadoop.ipc.RPC.Server server) |
Add protobuf based protocol to the
RPC.Server. |
static java.lang.String |
addKeySuffixes(java.lang.String key,
java.lang.String... suffixes) |
Return configuration key of format key.suffix1.suffix2...suffixN
|
static void |
addPBProtocol(org.apache.hadoop.conf.Configuration conf,
java.lang.Class<?> protocol,
com.google.protobuf.BlockingService service,
org.apache.hadoop.ipc.RPC.Server server) |
Deprecated.
|
static void |
addPBProtocol(org.apache.hadoop.conf.Configuration conf,
java.lang.Class<?> protocol,
org.apache.hadoop.thirdparty.protobuf.BlockingService service,
org.apache.hadoop.ipc.RPC.Server server) |
Deprecated.
|
static java.lang.String |
addressMapToString(java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.net.InetSocketAddress>> map) |
Format the given map, as returned by other functions in this class,
into a string suitable for debugging display.
|
static void |
addTransferRateMetric(DataNodeMetrics metrics,
long read,
long durationInNS) |
Add transfer rate metrics in bytes per second.
|
static void |
assertAllResultsEqual(java.util.Collection<?> objects) |
Assert that all objects in the collection are equal.
|
static java.lang.String |
byteArray2PathString(byte[][] pathComponents) |
|
static java.lang.String |
byteArray2PathString(byte[][] components,
int offset,
int length) |
Given a list of path components returns a path as a UTF8 String
|
static byte[][] |
bytes2byteArray(byte[] bytes,
byte separator) |
Splits the array of bytes into array of arrays of bytes
on byte separator
|
static byte[][] |
bytes2byteArray(byte[] bytes,
int len,
byte separator) |
Splits first len bytes in bytes to array of arrays of bytes
on byte separator
|
static java.lang.String |
bytes2String(byte[] bytes) |
Converts a byte array to a string using UTF8 encoding.
|
static java.lang.String |
bytes2String(byte[] bytes,
int offset,
int length) |
Decode a specific range of bytes of the given byte array to a string
using UTF8.
|
static void |
checkProtectedDescendants(FSDirectory fsd,
INodesInPath iip) |
Throw if the given directory has any non-empty protected descendants
(including itself).
|
static org.apache.hadoop.crypto.key.KeyProviderCryptoExtension |
createKeyProviderCryptoExtension(org.apache.hadoop.conf.Configuration conf) |
Creates a new KeyProviderCryptoExtension by wrapping the
KeyProvider specified in the given Configuration.
|
static java.net.URI |
createUri(java.lang.String scheme,
java.lang.String host,
int port) |
Create a URI from scheme, host, and port.
|
static java.net.URI |
createUri(java.lang.String scheme,
java.net.InetSocketAddress address) |
Create a URI from scheme and address.
|
static java.lang.String |
dateToIso8601String(java.util.Date date) |
Converts a Date into an ISO-8601 formatted datetime string.
|
static org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier |
decodeDelegationToken(org.apache.hadoop.security.token.Token<org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier> token) |
Decodes an HDFS delegation token to its identifier.
|
static java.lang.String |
durationToString(long durationMs) |
Converts a time duration in milliseconds into DDD:HH:MM:SS format.
|
static java.util.List<DFSUtil.ConfiguredNNAddress> |
flattenAddressMap(java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.net.InetSocketAddress>> map) |
Flatten the given map, as returned by other functions in this class,
into a flat list of
DFSUtil.ConfiguredNNAddress instances. |
static java.util.Set<java.lang.String> |
getAllNnPrincipals(org.apache.hadoop.conf.Configuration conf) |
|
static java.lang.String |
getBackupNameServiceId(org.apache.hadoop.conf.Configuration conf) |
Get nameservice Id for the BackupNode based on backup node RPC address
matching the local node address.
|
static java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.net.InetSocketAddress>> |
getBackupNodeAddresses(org.apache.hadoop.conf.Configuration conf) |
Returns list of InetSocketAddress corresponding to backup node rpc
addresses from the configuration.
|
static java.net.InetSocketAddress |
getBindAddress(org.apache.hadoop.conf.Configuration conf,
java.lang.String confKey,
java.lang.String defaultValue,
java.lang.String bindHostKey) |
Determine the
InetSocketAddress to bind to, for any service. |
static java.util.EnumSet<org.apache.hadoop.hdfs.protocol.HdfsFileStatus.Flags> |
getFlags(boolean isEncrypted,
boolean isErasureCoded,
boolean isSnapShottable,
boolean hasAcl) |
Generates HdfsFileStatus flags.
|
static java.lang.String |
getHttpClientScheme(org.apache.hadoop.conf.Configuration conf) |
Determine whether HTTP or HTTPS should be used to connect to the remote
server.
|
static org.apache.hadoop.http.HttpConfig.Policy |
getHttpPolicy(org.apache.hadoop.conf.Configuration conf) |
Get http policy.
|
static java.net.URI |
getInfoServer(java.net.InetSocketAddress namenodeAddr,
org.apache.hadoop.conf.Configuration conf,
java.lang.String scheme) |
return server http or https address from the configuration for a
given namenode rpc address.
|
static java.net.URI |
getInfoServerWithDefaultHost(java.lang.String defaultHost,
org.apache.hadoop.conf.Configuration conf,
java.lang.String scheme) |
Lookup the HTTP / HTTPS address of the namenode, and replace its hostname
with defaultHost when it found out that the address is a wildcard / local
address.
|
static java.util.Collection<java.net.URI> |
getInternalNsRpcUris(org.apache.hadoop.conf.Configuration conf) |
Get a URI for each internal nameservice.
|
static float |
getInvalidateWorkPctPerIteration(org.apache.hadoop.conf.Configuration conf) |
Get DFS_NAMENODE_INVALIDATE_WORK_PCT_PER_ITERATION from configuration.
|
static java.util.Set<java.lang.String> |
getJournalNodeAddresses(org.apache.hadoop.conf.Configuration conf) |
Returns list of Journalnode addresses from the configuration.
|
static java.lang.String |
getNamenodeLifelineAddr(org.apache.hadoop.conf.Configuration conf,
java.lang.String nsId,
java.lang.String nnId) |
Map a logical namenode ID to its lifeline address.
|
static java.lang.String |
getNamenodeNameServiceId(org.apache.hadoop.conf.Configuration conf) |
Get nameservice Id for the
NameNode based on namenode RPC address
matching the local node address. |
static java.lang.String |
getNamenodeServiceAddr(org.apache.hadoop.conf.Configuration conf,
java.lang.String nsId,
java.lang.String nnId) |
Map a logical namenode ID to its service address.
|
static java.lang.String |
getNamenodeWebAddr(org.apache.hadoop.conf.Configuration conf,
java.lang.String nsId,
java.lang.String nnId) |
Map a logical namenode ID to its web address.
|
static java.lang.String |
getNameServiceIdFromAddress(org.apache.hadoop.conf.Configuration conf,
java.net.InetSocketAddress address,
java.lang.String... keys) |
Given the InetSocketAddress this method returns the nameservice Id
corresponding to the key with matching address, by doing a reverse
lookup on the list of nameservices until it finds a match.
|
static java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.net.InetSocketAddress>> |
getNNLifelineRpcAddressesForCluster(org.apache.hadoop.conf.Configuration conf) |
Returns list of InetSocketAddresses corresponding to lifeline RPC servers
at namenodes from the configuration.
|
static java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.net.InetSocketAddress>> |
getNNServiceRpcAddresses(org.apache.hadoop.conf.Configuration conf) |
Returns list of InetSocketAddresses corresponding to namenodes from the
configuration.
|
static java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.net.InetSocketAddress>> |
getNNServiceRpcAddressesForCluster(org.apache.hadoop.conf.Configuration conf) |
Returns list of InetSocketAddresses corresponding to the namenode
that manages this cluster.
|
static java.lang.String |
getOnlyNameServiceIdOrNull(org.apache.hadoop.conf.Configuration conf) |
If the configuration refers to only a single nameservice, return the
name of that nameservice.
|
static byte[][] |
getPathComponents(java.lang.String path) |
Convert a UTF8 string to an array of byte arrays.
|
static int |
getReplWorkMultiplier(org.apache.hadoop.conf.Configuration conf) |
Get DFS_NAMENODE_REPLICATION_WORK_MULTIPLIER_PER_ITERATION from
configuration.
|
static java.util.Map<java.lang.String,java.net.InetSocketAddress> |
getRpcAddressesForNameserviceId(org.apache.hadoop.conf.Configuration conf,
java.lang.String nsId,
java.lang.String defaultValue) |
Get all of the RPC addresses of the individual NNs in a given nameservice.
|
static java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.net.InetSocketAddress>> |
getSecondaryNameNodeAddresses(org.apache.hadoop.conf.Configuration conf) |
Returns list of InetSocketAddresses corresponding to secondary namenode
http addresses from the configuration.
|
static java.lang.String |
getSecondaryNameServiceId(org.apache.hadoop.conf.Configuration conf) |
Get nameservice Id for the secondary node based on secondary http address
matching the local node address.
|
static java.security.SecureRandom |
getSecureRandom() |
|
static java.lang.String |
getSpnegoKeytabKey(org.apache.hadoop.conf.Configuration conf,
java.lang.String defaultKey) |
Get SPNEGO keytab Key from configuration
|
static long |
getTransferRateInBytesPerSecond(long bytes,
long durationInNS) |
Calculate the transfer rate in bytes per second.
|
static java.util.Map<java.lang.String,java.net.InetSocketAddress> |
getWebAddressesForNameserviceId(org.apache.hadoop.conf.Configuration conf,
java.lang.String nsId,
java.lang.String defaultValue) |
Get all of the Web addresses of the individual NNs in a given nameservice.
|
static org.apache.hadoop.http.HttpServer2.Builder |
httpServerTemplateForNNAndJN(org.apache.hadoop.conf.Configuration conf,
java.net.InetSocketAddress httpAddr,
java.net.InetSocketAddress httpsAddr,
java.lang.String name,
java.lang.String spnegoUserNameKey,
java.lang.String spnegoKeytabFileKey) |
Return a HttpServer.Builder that the journalnode / namenode / secondary
namenode can use to initialize their HTTP / HTTPS server.
|
static boolean |
isParentEntry(java.lang.String path,
java.lang.String parent) |
Check if the given path is the child of parent path.
|
static boolean |
isReservedPathComponent(java.lang.String component) |
Returns if the component is reserved.
|
static boolean |
isValidName(java.lang.String src) |
Whether the pathname is valid.
|
static boolean |
isValidNameForComponent(java.lang.String component) |
Checks if a string is a valid path component.
|
static org.apache.hadoop.conf.Configuration |
loadSslConfiguration(org.apache.hadoop.conf.Configuration conf) |
Load HTTPS-related configuration.
|
static org.apache.hadoop.http.HttpServer2.Builder |
loadSslConfToHttpServerBuilder(org.apache.hadoop.http.HttpServer2.Builder builder,
org.apache.hadoop.conf.Configuration sslConf) |
|
static java.lang.String |
nnAddressesAsString(org.apache.hadoop.conf.Configuration conf) |
|
static boolean |
parseHelpArgument(java.lang.String[] args,
java.lang.String helpDescription,
java.io.PrintStream out,
boolean printGenericCommandUsage) |
Parse the arguments for commands
|
static long |
parseRelativeTime(java.lang.String relTime) |
Converts a relative time string into a duration in milliseconds.
|
static java.lang.String |
path2String(java.lang.Object path) |
Convert an object representing a path to a string.
|
static int |
roundBytesToGB(long bytes) |
Round bytes to GiB (gibibyte)
|
static void |
setGenericConf(org.apache.hadoop.conf.Configuration conf,
java.lang.String nameserviceId,
java.lang.String nnId,
java.lang.String... keys) |
Sets the node specific setting into generic configuration key.
|
static byte[] |
string2Bytes(java.lang.String str) |
Converts a string to a byte array using UTF8 encoding.
|
static java.lang.String |
strings2PathString(java.lang.String[] components) |
Converts a list of path components into a path using Path.SEPARATOR.
|
public static final org.slf4j.Logger LOG
public static final org.apache.commons.cli.Options helpOptions
public static final org.apache.commons.cli.Option helpOpt
public static java.security.SecureRandom getSecureRandom()
public static boolean isValidName(java.lang.String src)
public static boolean isValidNameForComponent(java.lang.String component)
The primary use of this method is for validating paths when loading the FSImage. During normal NN operation, paths are sometimes allowed to contain reserved components.
public static boolean isReservedPathComponent(java.lang.String component)
Note that some components are only reserved under certain directories, e.g. "/.reserved" is reserved, while "/hadoop/.reserved" is not.
public static java.lang.String bytes2String(byte[] bytes)
public static java.lang.String bytes2String(byte[] bytes,
int offset,
int length)
bytes - The bytes to be decoded into characters
offset - The index of the first byte to decode
length - The number of bytes to decode
public static byte[] string2Bytes(java.lang.String str)
public static java.lang.String byteArray2PathString(byte[][] components,
int offset,
int length)
public static java.lang.String byteArray2PathString(byte[][] pathComponents)
public static java.lang.String strings2PathString(java.lang.String[] components)
components - Path componentspublic static java.lang.String path2String(java.lang.Object path)
public static byte[][] getPathComponents(java.lang.String path)
public static byte[][] bytes2byteArray(byte[] bytes,
byte separator)
bytes - the array of bytes to split
separator - the delimiting byte
public static byte[][] bytes2byteArray(byte[] bytes,
int len,
byte separator)
bytes - the byte array to split
len - the number of bytes to split
separator - the delimiting byte
public static java.lang.String addKeySuffixes(java.lang.String key,
java.lang.String... suffixes)
public static java.util.Map<java.lang.String,java.net.InetSocketAddress> getRpcAddressesForNameserviceId(org.apache.hadoop.conf.Configuration conf,
java.lang.String nsId,
java.lang.String defaultValue)
conf - Configuration
nsId - the nameservice whose NNs addresses we want.
defaultValue - default address to return in case key is not found.
public static java.util.Set<java.lang.String> getAllNnPrincipals(org.apache.hadoop.conf.Configuration conf)
throws java.io.IOException
java.io.IOExceptionpublic static java.util.Set<java.lang.String> getJournalNodeAddresses(org.apache.hadoop.conf.Configuration conf)
throws java.net.URISyntaxException,
java.io.IOException
conf - configurationjava.net.URISyntaxExceptionjava.io.IOExceptionpublic static java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.net.InetSocketAddress>> getBackupNodeAddresses(org.apache.hadoop.conf.Configuration conf)
throws java.io.IOException
conf - configurationjava.io.IOException - on errorpublic static java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.net.InetSocketAddress>> getSecondaryNameNodeAddresses(org.apache.hadoop.conf.Configuration conf)
throws java.io.IOException
conf - configurationjava.io.IOException - on errorpublic static java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.net.InetSocketAddress>> getNNServiceRpcAddresses(org.apache.hadoop.conf.Configuration conf)
throws java.io.IOException
conf - configurationjava.io.IOException - on errorpublic static java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.net.InetSocketAddress>> getNNServiceRpcAddressesForCluster(org.apache.hadoop.conf.Configuration conf)
throws java.io.IOException
conf - configurationjava.io.IOException - on errorpublic static java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.net.InetSocketAddress>> getNNLifelineRpcAddressesForCluster(org.apache.hadoop.conf.Configuration conf)
throws java.io.IOException
conf - configurationjava.io.IOException - on errorpublic static java.lang.String getNamenodeLifelineAddr(org.apache.hadoop.conf.Configuration conf,
java.lang.String nsId,
java.lang.String nnId)
conf - ConfigurationnsId - which nameservice nnId is a part of, optionalnnId - the namenode ID to get the service addr forpublic static java.util.List<DFSUtil.ConfiguredNNAddress> flattenAddressMap(java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.net.InetSocketAddress>> map)
DFSUtil.ConfiguredNNAddress instances.public static java.lang.String addressMapToString(java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.net.InetSocketAddress>> map)
public static java.lang.String nnAddressesAsString(org.apache.hadoop.conf.Configuration conf)
public static java.util.Collection<java.net.URI> getInternalNsRpcUris(org.apache.hadoop.conf.Configuration conf)
conf - configurationpublic static java.lang.String getNameServiceIdFromAddress(org.apache.hadoop.conf.Configuration conf,
java.net.InetSocketAddress address,
java.lang.String... keys)
conf - - configurationaddress - - InetSocketAddress for configured communication with NN.
Configured addresses are typically given as URIs, but we may have to
compare against a URI typed in by a human, or the server name may be
aliased, so we compare unambiguous InetSocketAddresses instead of just
comparing URI substrings.keys - - list of configured communication parameters that should
be checked for matches. For example, to compare against RPC addresses,
provide the list DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY,
DFS_NAMENODE_RPC_ADDRESS_KEY. Use the generic parameter keys,
not the NameServiceId-suffixed keys.public static java.net.URI getInfoServer(java.net.InetSocketAddress namenodeAddr,
org.apache.hadoop.conf.Configuration conf,
java.lang.String scheme)
throws java.io.IOException
namenodeAddr - - namenode RPC addressconf - configurationscheme - - the scheme (http / https)java.io.IOExceptionpublic static java.net.URI getInfoServerWithDefaultHost(java.lang.String defaultHost,
org.apache.hadoop.conf.Configuration conf,
java.lang.String scheme)
throws java.io.IOException
defaultHost - The default host name of the namenode.conf - The configurationscheme - HTTP or HTTPSjava.io.IOExceptionpublic static java.lang.String getHttpClientScheme(org.apache.hadoop.conf.Configuration conf)
public static void setGenericConf(org.apache.hadoop.conf.Configuration conf,
java.lang.String nameserviceId,
java.lang.String nnId,
java.lang.String... keys)
conf - Configuration object to lookup specific key and to set the value to the key passed. Note the conf object is modified.
nameserviceId - nameservice Id to construct the node specific key. Pass null if federation is not configured.
nnId - namenode Id to construct the node specific key. Pass null if HA is not configured.
keys - The key for which node specific value is looked up
public static int roundBytesToGB(long bytes)
bytes - number of bytespublic static java.lang.String getNamenodeNameServiceId(org.apache.hadoop.conf.Configuration conf)
NameNode based on namenode RPC address
matching the local node address.public static java.lang.String getBackupNameServiceId(org.apache.hadoop.conf.Configuration conf)
public static java.lang.String getSecondaryNameServiceId(org.apache.hadoop.conf.Configuration conf)
public static java.net.InetSocketAddress getBindAddress(org.apache.hadoop.conf.Configuration conf,
java.lang.String confKey,
java.lang.String defaultValue,
java.lang.String bindHostKey)
InetSocketAddress to bind to, for any service.
In case of HA or federation, the address is assumed to be specified as
confKey.NAMESPACEID.NAMENODEID as appropriate.
conf - configuration.
confKey - configuration key (prefix if HA/federation) used to specify the address for the service.
defaultValue - default value for the address.
bindHostKey - configuration key (prefix if HA/federation) specifying host to bind to.
public static java.net.URI createUri(java.lang.String scheme,
java.net.InetSocketAddress address)
public static java.net.URI createUri(java.lang.String scheme,
java.lang.String host,
int port)
@Private
@Unstable
public static void addInternalPBProtocol(org.apache.hadoop.conf.Configuration conf,
java.lang.Class<?> protocol,
org.apache.hadoop.thirdparty.protobuf.BlockingService service,
org.apache.hadoop.ipc.RPC.Server server)
throws java.io.IOException
RPC.Server.
This method is for exclusive use by the hadoop libraries, as its signature
changes with the version of the shaded protobuf library it has been built with.conf - configurationprotocol - Protocol interfaceservice - service that implements the protocolserver - RPC server to which the protocol & implementation is
added tojava.io.IOException - failure@Deprecated
public static void addPBProtocol(org.apache.hadoop.conf.Configuration conf,
java.lang.Class<?> protocol,
org.apache.hadoop.thirdparty.protobuf.BlockingService service,
org.apache.hadoop.ipc.RPC.Server server)
throws java.io.IOException
RPC.Server.
Deprecated as it will only reliably compile if an unshaded protobuf library
is also on the classpath.conf - configurationprotocol - Protocol interfaceservice - service that implements the protocolserver - RPC server to which the protocol & implementation is
added tojava.io.IOException@Deprecated
public static void addPBProtocol(org.apache.hadoop.conf.Configuration conf,
java.lang.Class<?> protocol,
com.google.protobuf.BlockingService service,
org.apache.hadoop.ipc.RPC.Server server)
throws java.io.IOException
RPC.Server.
This engine uses Protobuf 2.5.0. Recommended to upgrade to
Protobuf 3.x from hadoop-thirdparty and use
addInternalPBProtocol(Configuration, Class, BlockingService,
RPC.Server).conf - configurationprotocol - Protocol interfaceservice - service that implements the protocolserver - RPC server to which the protocol & implementation is
added tojava.io.IOExceptionpublic static java.lang.String getNamenodeServiceAddr(org.apache.hadoop.conf.Configuration conf,
java.lang.String nsId,
java.lang.String nnId)
conf - ConfigurationnsId - which nameservice nnId is a part of, optionalnnId - the namenode ID to get the service addr forpublic static java.lang.String getNamenodeWebAddr(org.apache.hadoop.conf.Configuration conf,
java.lang.String nsId,
java.lang.String nnId)
conf - ConfigurationnsId - which nameservice nnId is a part of, optionalnnId - the namenode ID to get the service addr forpublic static java.util.Map<java.lang.String,java.net.InetSocketAddress> getWebAddressesForNameserviceId(org.apache.hadoop.conf.Configuration conf,
java.lang.String nsId,
java.lang.String defaultValue)
conf - ConfigurationnsId - the nameservice whose NNs addresses we want.defaultValue - default address to return in case key is not found.public static java.lang.String getOnlyNameServiceIdOrNull(org.apache.hadoop.conf.Configuration conf)
public static boolean parseHelpArgument(java.lang.String[] args,
java.lang.String helpDescription,
java.io.PrintStream out,
boolean printGenericCommandUsage)
args - the arguments to be parsed
helpDescription - help information to be printed out
out - Printer
printGenericCommandUsage - whether to print the generic command usage defined in ToolRunner
public static float getInvalidateWorkPctPerIteration(org.apache.hadoop.conf.Configuration conf)
conf - Configurationpublic static int getReplWorkMultiplier(org.apache.hadoop.conf.Configuration conf)
conf - Configurationpublic static java.lang.String getSpnegoKeytabKey(org.apache.hadoop.conf.Configuration conf,
java.lang.String defaultKey)
conf - ConfigurationdefaultKey - default key to be used for config lookuppublic static org.apache.hadoop.http.HttpConfig.Policy getHttpPolicy(org.apache.hadoop.conf.Configuration conf)
public static org.apache.hadoop.http.HttpServer2.Builder loadSslConfToHttpServerBuilder(org.apache.hadoop.http.HttpServer2.Builder builder,
org.apache.hadoop.conf.Configuration sslConf)
public static java.lang.String dateToIso8601String(java.util.Date date)
public static java.lang.String durationToString(long durationMs)
public static long parseRelativeTime(java.lang.String relTime)
throws java.io.IOException
java.io.IOExceptionpublic static org.apache.hadoop.conf.Configuration loadSslConfiguration(org.apache.hadoop.conf.Configuration conf)
public static org.apache.hadoop.http.HttpServer2.Builder httpServerTemplateForNNAndJN(org.apache.hadoop.conf.Configuration conf,
java.net.InetSocketAddress httpAddr,
java.net.InetSocketAddress httpsAddr,
java.lang.String name,
java.lang.String spnegoUserNameKey,
java.lang.String spnegoKeytabFileKey)
throws java.io.IOException
java.io.IOExceptionpublic static void assertAllResultsEqual(java.util.Collection<?> objects)
throws java.lang.AssertionError
objects - the collection of objects to check for equality.java.lang.AssertionErrorpublic static org.apache.hadoop.crypto.key.KeyProviderCryptoExtension createKeyProviderCryptoExtension(org.apache.hadoop.conf.Configuration conf)
throws java.io.IOException
conf - Configurationjava.io.IOException - if the KeyProvider is improperly specified in
the Configurationpublic static org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier decodeDelegationToken(org.apache.hadoop.security.token.Token<org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier> token)
throws java.io.IOException
token - the tokenjava.io.IOExceptionpublic static void checkProtectedDescendants(FSDirectory fsd, INodesInPath iip) throws org.apache.hadoop.security.AccessControlException, org.apache.hadoop.fs.UnresolvedLinkException, org.apache.hadoop.fs.ParentNotDirectoryException
fsd - the namespace tree.iip - directory whose descendants are to be checked.org.apache.hadoop.security.AccessControlException - if a non-empty protected descendant
was found.org.apache.hadoop.fs.ParentNotDirectoryExceptionorg.apache.hadoop.fs.UnresolvedLinkExceptionpublic static java.util.EnumSet<org.apache.hadoop.hdfs.protocol.HdfsFileStatus.Flags> getFlags(boolean isEncrypted,
boolean isErasureCoded,
boolean isSnapShottable,
boolean hasAcl)
isEncrypted - Sets HAS_CRYPTisErasureCoded - Sets HAS_ECisSnapShottable - Sets SNAPSHOT_ENABLEDhasAcl - Sets HAS_ACLpublic static boolean isParentEntry(java.lang.String path,
java.lang.String parent)
path - Path to be check.parent - Parent path.public static void addTransferRateMetric(DataNodeMetrics metrics, long read, long durationInNS)
metrics - metrics for datanodesread - bytes readdurationInNS - read duration in nanosecondspublic static long getTransferRateInBytesPerSecond(long bytes,
long durationInNS)
bytes - bytes readdurationInNS - read duration in nanosecondsCopyright © 2008–2025 Apache Software Foundation. All rights reserved.