@LimitedPrivate("HDFS")
public class CacheManager
extends java.lang.Object
| Modifier and Type | Class | Description |
|---|---|---|
static class |
CacheManager.PersistState |
| Modifier and Type | Field | Description |
|---|---|---|
static org.slf4j.Logger |
LOG |
| Modifier and Type | Method | Description |
|---|---|---|
org.apache.hadoop.hdfs.protocol.CachePoolInfo |
addCachePool(org.apache.hadoop.hdfs.protocol.CachePoolInfo info) |
Create a cache pool.
|
org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo |
addDirective(org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo info,
FSPermissionChecker pc,
java.util.EnumSet<org.apache.hadoop.fs.CacheFlag> flags) |
|
void |
clearDirectiveStats() |
|
org.apache.hadoop.util.GSet<CachedBlock,CachedBlock> |
getCachedBlocks() |
|
java.util.Collection<CacheDirective> |
getCacheDirectives() |
|
java.util.Collection<CachePool> |
getCachePools() |
|
java.lang.Thread |
getCacheReplicationMonitor() |
|
long |
getMaxLockTimeMs() |
|
long |
getSleepTimeMs() |
|
boolean |
isCheckLockTimeEnable() |
|
boolean |
isEnabled() |
|
org.apache.hadoop.fs.BatchedRemoteIterator.BatchedListEntries<org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry> |
listCacheDirectives(long prevId,
org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo filter,
FSPermissionChecker pc) |
|
org.apache.hadoop.fs.BatchedRemoteIterator.BatchedListEntries<org.apache.hadoop.hdfs.protocol.CachePoolEntry> |
listCachePools(FSPermissionChecker pc,
java.lang.String prevKey) |
|
void |
loadState(CacheManager.PersistState s) |
|
void |
loadStateCompat(java.io.DataInput in) |
Reloads CacheManager state from the passed DataInput.
|
void |
modifyCachePool(org.apache.hadoop.hdfs.protocol.CachePoolInfo info) |
Modify a cache pool.
|
void |
modifyDirective(org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo info,
FSPermissionChecker pc,
java.util.EnumSet<org.apache.hadoop.fs.CacheFlag> flags) |
|
void |
processCacheReport(org.apache.hadoop.hdfs.protocol.DatanodeID datanodeID,
java.util.List<java.lang.Long> blockIds) |
|
void |
removeCachePool(java.lang.String poolName) |
Remove a cache pool.
|
void |
removeDirective(long id,
FSPermissionChecker pc) |
|
CacheManager.PersistState |
saveState() |
|
void |
saveStateCompat(java.io.DataOutputStream out,
java.lang.String sdPath) |
Saves the current state of the CacheManager to the DataOutput.
|
void |
setCachedLocations(org.apache.hadoop.hdfs.protocol.LocatedBlocks locations) |
|
void |
startMonitorThread() |
|
void |
stopMonitorThread() |
|
void |
waitForRescanIfNeeded() |
public boolean isEnabled()
public boolean isCheckLockTimeEnable()
public long getMaxLockTimeMs()
public long getSleepTimeMs()
public void startMonitorThread()
public void stopMonitorThread()
public void clearDirectiveStats()
public java.util.Collection<CachePool> getCachePools()
public java.util.Collection<CacheDirective> getCacheDirectives()
@VisibleForTesting public org.apache.hadoop.util.GSet<CachedBlock,CachedBlock> getCachedBlocks()
public org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo addDirective(org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo info,
FSPermissionChecker pc,
java.util.EnumSet<org.apache.hadoop.fs.CacheFlag> flags)
throws java.io.IOException
Throws:
java.io.IOException

public void modifyDirective(org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo info,
FSPermissionChecker pc,
java.util.EnumSet<org.apache.hadoop.fs.CacheFlag> flags)
throws java.io.IOException
Throws:
java.io.IOException

public void removeDirective(long id,
FSPermissionChecker pc)
throws java.io.IOException
Throws:
java.io.IOException

public org.apache.hadoop.fs.BatchedRemoteIterator.BatchedListEntries<org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry> listCacheDirectives(long prevId,
org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo filter,
FSPermissionChecker pc)
throws java.io.IOException
Throws:
java.io.IOException

public org.apache.hadoop.hdfs.protocol.CachePoolInfo addCachePool(org.apache.hadoop.hdfs.protocol.CachePoolInfo info)
throws java.io.IOException
Parameters:
info - The info for the cache pool to create.
Throws:
java.io.IOException

public void modifyCachePool(org.apache.hadoop.hdfs.protocol.CachePoolInfo info)
throws java.io.IOException
Parameters:
info - The info for the cache pool to modify.
Throws:
java.io.IOException

public void removeCachePool(java.lang.String poolName)
throws java.io.IOException
Parameters:
poolName - The name for the cache pool to remove.
Throws:
java.io.IOException

public org.apache.hadoop.fs.BatchedRemoteIterator.BatchedListEntries<org.apache.hadoop.hdfs.protocol.CachePoolEntry> listCachePools(FSPermissionChecker pc, java.lang.String prevKey)
public void setCachedLocations(org.apache.hadoop.hdfs.protocol.LocatedBlocks locations)
public final void processCacheReport(org.apache.hadoop.hdfs.protocol.DatanodeID datanodeID,
java.util.List<java.lang.Long> blockIds)
throws java.io.IOException
Throws:
java.io.IOException

public void saveStateCompat(java.io.DataOutputStream out,
java.lang.String sdPath)
throws java.io.IOException
Parameters:
out - DataOutput to persist state
sdPath - path of the storage directory
Throws:
java.io.IOException

public CacheManager.PersistState saveState() throws java.io.IOException
Throws:
java.io.IOException

public void loadStateCompat(java.io.DataInput in)
throws java.io.IOException
Parameters:
in - DataInput from which to restore state
Throws:
java.io.IOException

public void loadState(CacheManager.PersistState s) throws java.io.IOException
Throws:
java.io.IOException

public void waitForRescanIfNeeded()
@VisibleForTesting public java.lang.Thread getCacheReplicationMonitor()
Copyright © 2008–2025 Apache Software Foundation. All rights reserved.