public class HTableImpl
extends org.apache.hadoop.hbase.client.mapr.AbstractHTable
| Modifier and Type | Class and Description |
|---|---|
class |
HTableImpl.FamilyInfo |
| Modifier and Type | Field and Description |
|---|---|
protected boolean |
autoFlush |
static java.lang.String |
CONFIG_PARAM_FLUSH_ON_READ |
protected boolean |
flushOnRead |
protected com.mapr.fs.MapRHTable |
maprTable |
protected byte[] |
tableName
Stores table path in a byte array
|
| Constructor and Description |
|---|
HTableImpl(org.apache.hadoop.conf.Configuration conf,
byte[] tableName)
Creates an object to access a MapR table.
|
| Modifier and Type | Method and Description |
|---|---|
org.apache.hadoop.hbase.client.Result |
append(org.apache.hadoop.hbase.client.Append append) |
java.lang.Object[] |
batch(java.util.List<? extends org.apache.hadoop.hbase.client.Row> actions) |
void |
batch(java.util.List<? extends org.apache.hadoop.hbase.client.Row> actions,
java.lang.Object[] results) |
boolean |
checkAndDelete(byte[] row,
byte[] family,
byte[] qualifier,
byte[] value,
org.apache.hadoop.hbase.client.Delete delete) |
boolean |
checkAndDelete(byte[] row,
byte[] family,
byte[] qualifier,
org.apache.hadoop.hbase.filter.CompareFilter.CompareOp compareOp,
byte[] value,
org.apache.hadoop.hbase.client.Delete delete) |
boolean |
checkAndMutate(byte[] row,
byte[] family,
byte[] qualifier,
org.apache.hadoop.hbase.filter.CompareFilter.CompareOp compareOp,
byte[] value,
org.apache.hadoop.hbase.client.RowMutations rm) |
boolean |
checkAndMutateImpl(byte[] row,
byte[] family,
byte[] qualifier,
org.apache.hadoop.hbase.filter.CompareFilter.CompareOp compareOp,
byte[] value,
org.apache.hadoop.hbase.client.RowMutations rm,
boolean throwerr) |
boolean |
checkAndPut(byte[] row,
byte[] family,
byte[] qualifier,
byte[] value,
org.apache.hadoop.hbase.client.Put put) |
boolean |
checkAndPut(byte[] row,
byte[] family,
byte[] qualifier,
org.apache.hadoop.hbase.filter.CompareFilter.CompareOp compareOp,
byte[] value,
org.apache.hadoop.hbase.client.Put put) |
protected void |
checkMutation(org.apache.hadoop.hbase.client.Mutation m) |
void |
close() |
void |
delete(org.apache.hadoop.hbase.client.Delete delete) |
void |
delete(java.util.List<org.apache.hadoop.hbase.client.Delete> deletes) |
boolean |
exists(org.apache.hadoop.hbase.client.Get get) |
java.lang.Boolean[] |
exists(java.util.List<org.apache.hadoop.hbase.client.Get> gets) |
void |
flushCommits() |
org.apache.hadoop.hbase.client.Result |
get(org.apache.hadoop.hbase.client.Get get) |
org.apache.hadoop.hbase.client.Result[] |
get(java.util.List<org.apache.hadoop.hbase.client.Get> gets) |
org.apache.hadoop.conf.Configuration |
getConfiguration() |
HTableImpl.FamilyInfo |
getFamilyInfo(byte[] row,
byte[] family) |
org.apache.hadoop.hbase.HRegionLocation |
getRegionLocation(byte[] row) |
java.util.NavigableMap<org.apache.hadoop.hbase.HRegionInfo,org.apache.hadoop.hbase.ServerName> |
getRegionLocations() |
org.apache.hadoop.hbase.client.Result |
getRowOrBefore(byte[] row,
byte[] family) |
org.apache.hadoop.hbase.client.ResultScanner |
getScanner(org.apache.hadoop.hbase.client.Scan scan) |
org.apache.hadoop.hbase.util.Pair<byte[][],byte[][]> |
getStartEndKeys() |
org.apache.hadoop.hbase.HTableDescriptor |
getTableDescriptor() |
byte[] |
getTableName()
Returns the complete table path as a byte array
|
org.apache.hadoop.hbase.client.Result |
increment(org.apache.hadoop.hbase.client.Increment increment) |
long |
incrementColumnValue(byte[] row,
byte[] family,
byte[] qualifier,
long amount) |
long |
incrementColumnValue(byte[] row,
byte[] family,
byte[] qualifier,
long amount,
boolean writeToWAL) |
long |
incrementColumnValue(byte[] row,
byte[] family,
byte[] qualifier,
long amount,
org.apache.hadoop.hbase.client.Durability durability) |
boolean |
isAutoFlush() |
void |
mutateRow(org.apache.hadoop.hbase.client.RowMutations rm) |
com.mapr.fs.jni.MapRPut |
MutateToMapRPut(byte[] row,
org.apache.hadoop.hbase.client.Mutation mut) |
void |
put(java.util.List<org.apache.hadoop.hbase.client.Put> puts) |
void |
put(org.apache.hadoop.hbase.client.Put put) |
void |
setAutoFlush(boolean autoFlush) |
void |
setAutoFlush(boolean autoFlush,
boolean clearBufferOnFail) |
void |
setFlushOnRead(boolean val) |
boolean |
shouldFlushOnRead() |
batchCoprocessorService, batchCoprocessorService, clearRegionCache, coprocessorService, coprocessorService, coprocessorService, getEndKeys, getRegionLocation, getRegionLocation, getScanner, getScanner, getStartKeys, getWriteBufferSize, setWriteBufferSize

public static final java.lang.String CONFIG_PARAM_FLUSH_ON_READ
protected boolean autoFlush
protected boolean flushOnRead
protected final com.mapr.fs.MapRHTable maprTable
protected byte[] tableName
public HTableImpl(org.apache.hadoop.conf.Configuration conf,
byte[] tableName)
throws java.io.IOException
conf - Configuration object to use.
tableName - Name of the table.
java.io.IOException - if a remote or network exception occurs

public byte[] getTableName()
getTableName in class org.apache.hadoop.hbase.client.mapr.AbstractHTable

public org.apache.hadoop.conf.Configuration getConfiguration()
getConfiguration in class org.apache.hadoop.hbase.client.mapr.AbstractHTable

public void flushCommits()
throws java.io.InterruptedIOException
flushCommits in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
java.io.InterruptedIOException

public void close()
throws java.io.IOException
close in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
java.io.IOException

public org.apache.hadoop.hbase.HTableDescriptor getTableDescriptor()
throws java.io.IOException
getTableDescriptor in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
java.io.IOException

public boolean exists(org.apache.hadoop.hbase.client.Get get)
throws java.io.IOException
exists in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
java.io.IOException

public java.lang.Boolean[] exists(java.util.List&lt;org.apache.hadoop.hbase.client.Get&gt; gets)
throws java.io.IOException
exists in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
java.io.IOException

public void batch(java.util.List&lt;? extends org.apache.hadoop.hbase.client.Row&gt; actions,
java.lang.Object[] results)
throws java.io.IOException,
java.lang.InterruptedException
batch in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
java.io.IOException
java.lang.InterruptedException

public java.lang.Object[] batch(java.util.List&lt;? extends org.apache.hadoop.hbase.client.Row&gt; actions)
throws java.io.IOException,
java.lang.InterruptedException
batch in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
java.io.IOException
java.lang.InterruptedException

public org.apache.hadoop.hbase.client.Result get(org.apache.hadoop.hbase.client.Get get)
throws java.io.IOException
get in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
java.io.IOException

public org.apache.hadoop.hbase.client.Result[] get(java.util.List&lt;org.apache.hadoop.hbase.client.Get&gt; gets)
throws java.io.IOException
get in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
java.io.IOException

public org.apache.hadoop.hbase.client.Result getRowOrBefore(byte[] row,
byte[] family)
throws java.io.IOException
getRowOrBefore in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
java.io.IOException

public org.apache.hadoop.hbase.client.ResultScanner getScanner(org.apache.hadoop.hbase.client.Scan scan)
throws java.io.IOException
getScanner in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
java.io.IOException

public void put(org.apache.hadoop.hbase.client.Put put)
throws java.io.InterruptedIOException
put in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
java.io.InterruptedIOException

public void put(java.util.List&lt;org.apache.hadoop.hbase.client.Put&gt; puts)
throws java.io.InterruptedIOException
put in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
java.io.InterruptedIOException

public HTableImpl.FamilyInfo getFamilyInfo(byte[] row, byte[] family) throws org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException, java.io.IOException
org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException
java.io.IOException

public com.mapr.fs.jni.MapRPut MutateToMapRPut(byte[] row,
org.apache.hadoop.hbase.client.Mutation mut)
throws java.io.IOException
java.io.IOException

public boolean checkAndPut(byte[] row,
byte[] family,
byte[] qualifier,
byte[] value,
org.apache.hadoop.hbase.client.Put put)
throws org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException,
java.io.IOException
checkAndPut in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException
java.io.IOException

public boolean checkAndPut(byte[] row,
byte[] family,
byte[] qualifier,
org.apache.hadoop.hbase.filter.CompareFilter.CompareOp compareOp,
byte[] value,
org.apache.hadoop.hbase.client.Put put)
throws java.io.IOException
checkAndPut in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
java.io.IOException

public void delete(org.apache.hadoop.hbase.client.Delete delete)
throws java.io.IOException
delete in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
java.io.IOException

public void delete(java.util.List&lt;org.apache.hadoop.hbase.client.Delete&gt; deletes)
throws java.io.IOException
delete in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
java.io.IOException

public boolean checkAndDelete(byte[] row,
byte[] family,
byte[] qualifier,
byte[] value,
org.apache.hadoop.hbase.client.Delete delete)
throws org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException,
java.io.IOException
checkAndDelete in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException
java.io.IOException

public boolean checkAndDelete(byte[] row,
byte[] family,
byte[] qualifier,
org.apache.hadoop.hbase.filter.CompareFilter.CompareOp compareOp,
byte[] value,
org.apache.hadoop.hbase.client.Delete delete)
throws java.io.IOException
checkAndDelete in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
java.io.IOException

public void mutateRow(org.apache.hadoop.hbase.client.RowMutations rm)
throws java.io.IOException
mutateRow in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
java.io.IOException

public org.apache.hadoop.hbase.client.Result append(org.apache.hadoop.hbase.client.Append append)
throws java.io.IOException
append in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
java.io.IOException

public org.apache.hadoop.hbase.client.Result increment(org.apache.hadoop.hbase.client.Increment increment)
throws java.io.IOException
increment in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
java.io.IOException

public long incrementColumnValue(byte[] row,
byte[] family,
byte[] qualifier,
long amount)
throws java.io.IOException
incrementColumnValue in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
java.io.IOException

public long incrementColumnValue(byte[] row,
byte[] family,
byte[] qualifier,
long amount,
boolean writeToWAL)
throws java.io.IOException
incrementColumnValue in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
java.io.IOException

public long incrementColumnValue(byte[] row,
byte[] family,
byte[] qualifier,
long amount,
org.apache.hadoop.hbase.client.Durability durability)
throws java.io.IOException
incrementColumnValue in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
java.io.IOException

public void setFlushOnRead(boolean val)
public boolean shouldFlushOnRead()
public void setAutoFlush(boolean autoFlush)
setAutoFlush in class org.apache.hadoop.hbase.client.mapr.AbstractHTable

public void setAutoFlush(boolean autoFlush,
boolean clearBufferOnFail)
setAutoFlush in class org.apache.hadoop.hbase.client.mapr.AbstractHTable

public boolean isAutoFlush()
isAutoFlush in class org.apache.hadoop.hbase.client.mapr.AbstractHTable

public org.apache.hadoop.hbase.HRegionLocation getRegionLocation(byte[] row)
throws java.io.IOException
getRegionLocation in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
java.io.IOException

public org.apache.hadoop.hbase.util.Pair&lt;byte[][],byte[][]&gt; getStartEndKeys()
throws java.io.IOException
getStartEndKeys in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
java.io.IOException

public java.util.NavigableMap&lt;org.apache.hadoop.hbase.HRegionInfo,org.apache.hadoop.hbase.ServerName&gt; getRegionLocations()
throws java.io.IOException
getRegionLocations in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
java.io.IOException

protected void checkMutation(org.apache.hadoop.hbase.client.Mutation m)
throws java.io.IOException
java.io.IOException

public boolean checkAndMutate(byte[] row,
byte[] family,
byte[] qualifier,
org.apache.hadoop.hbase.filter.CompareFilter.CompareOp compareOp,
byte[] value,
org.apache.hadoop.hbase.client.RowMutations rm)
throws java.io.IOException
checkAndMutate in class org.apache.hadoop.hbase.client.mapr.AbstractHTable
java.io.IOException

public boolean checkAndMutateImpl(byte[] row,
byte[] family,
byte[] qualifier,
org.apache.hadoop.hbase.filter.CompareFilter.CompareOp compareOp,
byte[] value,
org.apache.hadoop.hbase.client.RowMutations rm,
boolean throwerr)
throws java.io.IOException
java.io.IOException