java.lang.Comparable<org.apache.hadoop.hdfs.protocol.Block>, org.apache.hadoop.io.Writable, org.apache.hadoop.util.LightWeightGSet.LinkedElement

Direct Known Subclasses: BlockInfoContiguous, BlockInfoStriped

@Private
public abstract class BlockInfo
extends org.apache.hadoop.hdfs.protocol.Block
implements org.apache.hadoop.util.LightWeightGSet.LinkedElement
For a given block, maintains its metadata, including 1) the
BlockCollection it is part of, and 2) datanodes
where the replicas of the block, or blocks belonging to the erasure coding
block group, are stored.

| Modifier and Type | Field | Description |
|---|---|---|
static BlockInfo[] |
EMPTY_ARRAY |
|
protected java.lang.Object[] |
triplets |
This array contains triplets of references.
|
| Constructor | Description |
|---|---|
BlockInfo(short size) |
Construct an entry for blocksmap
|
BlockInfo(org.apache.hadoop.hdfs.protocol.Block blk,
short size) |
| Modifier and Type | Method | Description |
|---|---|---|
void |
convertToBlockUnderConstruction(HdfsServerConstants.BlockUCState s,
DatanodeStorageInfo[] targets) |
Add/Update the under construction feature.
|
void |
delete() |
|
boolean |
equals(java.lang.Object obj) |
|
long |
getBlockCollectionId() |
|
abstract org.apache.hadoop.hdfs.protocol.BlockType |
getBlockType() |
|
HdfsServerConstants.BlockUCState |
getBlockUCState() |
|
int |
getCapacity() |
|
DatanodeDescriptor |
getDatanode(int index) |
|
org.apache.hadoop.util.LightWeightGSet.LinkedElement |
getNext() |
|
short |
getReplication() |
|
java.util.Iterator<DatanodeStorageInfo> |
getStorageInfos() |
|
BlockUnderConstructionFeature |
getUnderConstructionFeature() |
|
int |
hashCode() |
|
boolean |
isComplete() |
Is this block complete?
|
boolean |
isCompleteOrCommitted() |
|
boolean |
isDeleted() |
|
abstract boolean |
isStriped() |
|
boolean |
isUnderRecovery() |
|
BlockInfo |
moveBlockToHead(BlockInfo head,
DatanodeStorageInfo storage,
int curIndex,
int headIndex) |
Remove this block from the list of blocks related to the specified
DatanodeDescriptor.
|
abstract int |
numNodes() |
Count the number of data-nodes the block currently belongs to (i.e., NN
has received block reports from the DN).
|
void |
setBlockCollectionId(long id) |
|
java.util.List<org.apache.hadoop.hdfs.server.blockmanagement.ReplicaUnderConstruction> |
setGenerationStampAndVerifyReplicas(long genStamp) |
Process the recorded replicas.
|
void |
setNext(org.apache.hadoop.util.LightWeightGSet.LinkedElement next) |
|
void |
setReplication(short repl) |
Methods inherited from class org.apache.hadoop.hdfs.protocol.Block:
appendStringTo, compareTo, filename2id, getBlockId, getBlockId, getBlockName, getGenerationStamp, getGenerationStamp, getNumBytes, isBlockFilename, isMetaFilename, matchingIdAndGenStamp, metaToBlockFile, readFields, readId, set, setBlockId, setGenerationStamp, setNumBytes, toString, toString, write, writeId

public static final BlockInfo[] EMPTY_ARRAY
protected java.lang.Object[] triplets
This array contains triplets of references. For the i-th storage the block
belongs to, triplets[3*i] is the reference to the
DatanodeStorageInfo and triplets[3*i+1] and triplets[3*i+2] are
references to the previous and the next blocks, respectively, in the list
of blocks belonging to this storage.
Using previous and next in Object triplets is done instead of a
LinkedList list to efficiently use memory. With LinkedList the cost
per replica is 42 bytes (LinkedList#Entry object per replica) versus 16
bytes using the triplets.

public BlockInfo(short size)
Parameters: size - the block's replication factor, or the total number of blocks
in the block group

public BlockInfo(org.apache.hadoop.hdfs.protocol.Block blk,
short size)
public short getReplication()
public void setReplication(short repl)
public long getBlockCollectionId()
public void setBlockCollectionId(long id)
public void delete()
public boolean isDeleted()
public java.util.Iterator<DatanodeStorageInfo> getStorageInfos()
public DatanodeDescriptor getDatanode(int index)
public int getCapacity()
public abstract int numNodes()
public abstract boolean isStriped()
public abstract org.apache.hadoop.hdfs.protocol.BlockType getBlockType()
public BlockInfo moveBlockToHead(BlockInfo head, DatanodeStorageInfo storage, int curIndex, int headIndex)
public int hashCode()
Overrides: hashCode in class org.apache.hadoop.hdfs.protocol.Block

public boolean equals(java.lang.Object obj)
Overrides: equals in class org.apache.hadoop.hdfs.protocol.Block

public org.apache.hadoop.util.LightWeightGSet.LinkedElement getNext()
Specified by: getNext in interface org.apache.hadoop.util.LightWeightGSet.LinkedElement

public void setNext(org.apache.hadoop.util.LightWeightGSet.LinkedElement next)
Specified by: setNext in interface org.apache.hadoop.util.LightWeightGSet.LinkedElement

public BlockUnderConstructionFeature getUnderConstructionFeature()
public HdfsServerConstants.BlockUCState getBlockUCState()
public boolean isComplete()
See Also: HdfsServerConstants.BlockUCState.COMPLETE

public boolean isUnderRecovery()
public final boolean isCompleteOrCommitted()
public void convertToBlockUnderConstruction(HdfsServerConstants.BlockUCState s, DatanodeStorageInfo[] targets)
public java.util.List<org.apache.hadoop.hdfs.server.blockmanagement.ReplicaUnderConstruction> setGenerationStampAndVerifyReplicas(long genStamp)
Parameters: genStamp - The final generation stamp for the block.

Copyright © 2008–2025 Apache Software Foundation. All rights reserved.