public class HTableImpl11 extends HTableImpl
Nested classes/interfaces inherited from class HTableImpl: HTableImpl.FamilyInfo

Fields inherited from class HTableImpl: autoFlush, CONFIG_PARAM_FLUSH_ON_READ, flushOnRead, maprTable, tableName

| Constructor and Description |
|---|
HTableImpl11(org.apache.hadoop.conf.Configuration conf,
byte[] tableName)
Creates an object to access a MapR table.
|
HTableImpl11(org.apache.hadoop.conf.Configuration conf,
byte[] tableName,
org.apache.hadoop.hbase.client.BufferedMutator bm,
org.apache.hadoop.hbase.client.BufferedMutator.ExceptionListener listener,
java.util.concurrent.ExecutorService pool) |
| Modifier and Type | Method and Description |
|---|---|
boolean |
checkAndMutateImpl(byte[] row,
byte[] family,
byte[] qualifier,
org.apache.hadoop.hbase.filter.CompareFilter.CompareOp compareOp,
byte[] value,
org.apache.hadoop.hbase.client.RowMutations rm,
boolean throwerr) |
void |
close() |
void |
put(java.util.List<org.apache.hadoop.hbase.client.Put> puts) |
void |
put(org.apache.hadoop.hbase.client.Put put) |
com.mapr.fs.jni.MapRPut |
syncMutateToMapRPut(byte[] row,
org.apache.hadoop.hbase.client.Mutation mut) |
Methods inherited from class HTableImpl: append, batch, batch, checkAndDelete, checkAndDelete, checkAndMutate, checkAndPut, checkAndPut, checkMutation, delete, delete, exists, exists, flushCommits, get, get, getConfiguration, getFamilyInfo, getRegionLocation, getRegionLocations, getRowOrBefore, getScanner, getStartEndKeys, getTableDescriptor, getTableName, increment, incrementColumnValue, incrementColumnValue, incrementColumnValue, isAutoFlush, mutateRow, MutateToMapRPut, setAutoFlush, setAutoFlush, setFlushOnRead, shouldFlushOnRead

Additional inherited methods: batchCoprocessorService, batchCoprocessorService, clearRegionCache, coprocessorService, coprocessorService, coprocessorService, getEndKeys, getRegionLocation, getRegionLocation, getScanner, getScanner, getStartKeys, getWriteBufferSize, setWriteBufferSize

public HTableImpl11(org.apache.hadoop.conf.Configuration conf,
byte[] tableName)
throws java.io.IOException
Parameters: conf - Configuration object to use. tableName - Name of the table.
Throws: java.io.IOException - if a remote or network exception occurs

public HTableImpl11(org.apache.hadoop.conf.Configuration conf,
byte[] tableName,
org.apache.hadoop.hbase.client.BufferedMutator bm,
org.apache.hadoop.hbase.client.BufferedMutator.ExceptionListener listener,
java.util.concurrent.ExecutorService pool)
throws java.io.IOException
Throws: java.io.IOException

public void close()
throws java.io.IOException
Overrides: close in class HTableImpl
Throws: java.io.IOException

public void put(org.apache.hadoop.hbase.client.Put put)
throws java.io.InterruptedIOException
Overrides: put in class HTableImpl
Throws: java.io.InterruptedIOException

public void put(java.util.List<org.apache.hadoop.hbase.client.Put> puts)
throws java.io.InterruptedIOException
Overrides: put in class HTableImpl
Throws: java.io.InterruptedIOException

public com.mapr.fs.jni.MapRPut syncMutateToMapRPut(byte[] row,
org.apache.hadoop.hbase.client.Mutation mut)
throws java.io.IOException
Throws: java.io.IOException

public boolean checkAndMutateImpl(byte[] row,
byte[] family,
byte[] qualifier,
org.apache.hadoop.hbase.filter.CompareFilter.CompareOp compareOp,
byte[] value,
org.apache.hadoop.hbase.client.RowMutations rm,
boolean throwerr)
throws java.io.IOException
Overrides: checkAndMutateImpl in class HTableImpl
Throws: java.io.IOException