Codota Logo
ThriftServerRunner$HBaseHandler.closeTable
Code IndexAdd Codota to your IDE (free)

How to use
closeTable
method
in
org.apache.hadoop.hbase.thrift.ThriftServerRunner$HBaseHandler

Best Java code snippets using org.apache.hadoop.hbase.thrift.ThriftServerRunner$HBaseHandler.closeTable (Showing top 20 results out of 315)

  • Add the Codota plugin to your IDE and get smart completions
private void myMethod () {
Dictionary d =
  • Codota Iconnew Hashtable()
  • Codota IconBundle bundle;bundle.getHeaders()
  • Codota Iconnew Properties()
  • Smart code suggestions by Codota
}
origin: com.aliyun.hbase/alihbase-thrift

/**
 * Deletes an entire row from the named table, with the delete scoped to the
 * supplied timestamp (see HBase {@code Delete(byte[], long)}). Any Thrift
 * attributes are copied onto the Delete before it is applied.
 */
@Override
public void deleteAllRowTs(
    ByteBuffer tableName, ByteBuffer row, long timestamp,
    Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  Table target = null;
  try {
    target = getTable(tableName);
    Delete rowDelete = new Delete(getBytes(row), timestamp);
    addAttributes(rowDelete, attributes);
    target.delete(rowDelete);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw getIOError(e);
  } finally {
    // Always release the table handle, even when the delete failed.
    closeTable(target);
  }
}
origin: org.apache.hbase/hbase-thrift

// Thrift handler: remove every cell of the given row, bounded by the
// caller-supplied timestamp. Attributes from the Thrift request are
// attached to the Delete operation before execution.
@Override
public void deleteAllRowTs(
    ByteBuffer tableName, ByteBuffer row, long timestamp,
    Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  Table t = null;
  try {
    t = getTable(tableName);
    final Delete del = new Delete(getBytes(row), timestamp);
    addAttributes(del, attributes);
    t.delete(del);
  } catch (IOException ioe) {
    LOG.warn(ioe.getMessage(), ioe);
    throw getIOError(ioe);
  } finally {
    closeTable(t);
  }
}
origin: org.apache.hbase/hbase-thrift

/**
 * Returns the column-family descriptors of the named table, converted to
 * Thrift {@link ColumnDescriptor}s and keyed by family name (sorted map).
 */
@Override
public Map<ByteBuffer, ColumnDescriptor> getColumnDescriptors(
    ByteBuffer tableName) throws IOError, TException {
  Table t = null;
  try {
    t = getTable(tableName);
    HTableDescriptor descriptor = t.getTableDescriptor();
    TreeMap<ByteBuffer, ColumnDescriptor> result = new TreeMap<>();
    for (HColumnDescriptor family : descriptor.getFamilies()) {
      ColumnDescriptor thriftCol = ThriftUtilities.colDescFromHbase(family);
      result.put(thriftCol.name, thriftCol);
    }
    return result;
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw getIOError(e);
  } finally {
    closeTable(t);
  }
}
origin: com.aliyun.hbase/alihbase-thrift

/**
 * Atomically adds {@code amount} to the cell identified by
 * row/family/qualifier and returns the resulting value.
 */
protected long atomicIncrement(ByteBuffer tableName, ByteBuffer row,
    byte [] family, byte [] qualifier, long amount)
    throws IOError, IllegalArgument, TException {
  Table t = null;
  try {
    t = getTable(tableName);
    return t.incrementColumnValue(getBytes(row), family, qualifier, amount);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw getIOError(e);
  } finally {
    closeTable(t);
  }
}
origin: com.aliyun.hbase/alihbase-thrift

/**
 * Applies a Thrift increment to HBase. When increment coalescing is enabled
 * via {@code COALESCE_INC_KEY}, the increment is queued on the coalescer
 * instead of being applied immediately.
 */
@Override
public void increment(TIncrement tincrement) throws IOError, TException {
  // Reject requests that do not name both a table and a row.
  if (tincrement.getRow().length == 0 || tincrement.getTable().length == 0) {
    throw new TException("Must supply a table and a row key; can't increment");
  }
  if (conf.getBoolean(COALESCE_INC_KEY, false)) {
    this.coalescer.queueIncrement(tincrement);
    return;
  }
  Table t = null;
  try {
    t = getTable(tincrement.getTable());
    t.increment(ThriftUtilities.incrementFromThrift(tincrement));
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw getIOError(e);
  } finally {
    closeTable(t);
  }
}
origin: com.aliyun.hbase/alihbase-thrift

// Converts the HBase table descriptor of {@code tableName} into a sorted
// map of Thrift ColumnDescriptors, one entry per column family.
@Override
public Map<ByteBuffer, ColumnDescriptor> getColumnDescriptors(
    ByteBuffer tableName) throws IOError, TException {
  Table handle = null;
  try {
    handle = getTable(tableName);
    final TreeMap<ByteBuffer, ColumnDescriptor> families = new TreeMap<>();
    for (HColumnDescriptor hcd : handle.getTableDescriptor().getFamilies()) {
      ColumnDescriptor cd = ThriftUtilities.colDescFromHbase(hcd);
      families.put(cd.name, cd);
    }
    return families;
  } catch (IOException ioe) {
    LOG.warn(ioe.getMessage(), ioe);
    throw getIOError(ioe);
  } finally {
    closeTable(handle);
  }
}
origin: org.apache.hbase/hbase-thrift

/**
 * Deletes a column (or a whole family) from a row, scoped to the supplied
 * timestamp. {@code column} may be either "family" or "family:qualifier";
 * {@code CellUtil.parseColumn} yields one or two components accordingly.
 */
@Override
public void deleteAllTs(ByteBuffer tableName,
            ByteBuffer row,
            ByteBuffer column,
  long timestamp, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  Table t = null;
  try {
    t = getTable(tableName);
    Delete del = new Delete(getBytes(row));
    addAttributes(del, attributes);
    byte [][] famAndQf = CellUtil.parseColumn(getBytes(column));
    if (famAndQf.length != 1) {
      // family:qualifier — delete just that column's versions.
      del.addColumns(famAndQf[0], famAndQf[1], timestamp);
    } else {
      // family only — delete the whole family.
      del.addFamily(famAndQf[0], timestamp);
    }
    t.delete(del);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw getIOError(e);
  } finally {
    closeTable(t);
  }
}
origin: org.apache.hbase/hbase-thrift

/**
 * Applies a Thrift append to HBase and returns the cells of the resulting
 * row as Thrift {@link TCell}s.
 */
@Override
public List<TCell> append(TAppend tappend) throws IOError, TException {
  // Both a table name and a row key are mandatory.
  if (tappend.getRow().length == 0 || tappend.getTable().length == 0) {
    throw new TException("Must supply a table and a row key; can't append");
  }
  Table t = null;
  try {
    t = getTable(tappend.getTable());
    Result appended = t.append(ThriftUtilities.appendFromThrift(tappend));
    return ThriftUtilities.cellFromHBase(appended.rawCells());
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw getIOError(e);
  } finally {
    closeTable(t);
  }
}
origin: org.apache.hbase/hbase-thrift

// Thrift increment entry point. Validates the request, optionally hands it
// to the increment coalescer (COALESCE_INC_KEY), otherwise applies it
// synchronously against the table.
@Override
public void increment(TIncrement tincrement) throws IOError, TException {
  boolean missingKey =
      tincrement.getRow().length == 0 || tincrement.getTable().length == 0;
  if (missingKey) {
    throw new TException("Must supply a table and a row key; can't increment");
  }
  if (conf.getBoolean(COALESCE_INC_KEY, false)) {
    this.coalescer.queueIncrement(tincrement);
    return;
  }
  Table handle = null;
  try {
    handle = getTable(tincrement.getTable());
    Increment increment = ThriftUtilities.incrementFromThrift(tincrement);
    handle.increment(increment);
  } catch (IOException ioe) {
    LOG.warn(ioe.getMessage(), ioe);
    throw getIOError(ioe);
  } finally {
    closeTable(handle);
  }
}
origin: com.aliyun.hbase/alihbase-thrift

/**
 * Opens a scanner starting at {@code startRow} over the requested columns
 * and registers it, returning the scanner id for subsequent scannerGet calls.
 *
 * @param tableName  table to scan
 * @param startRow   first row the scan covers
 * @param columns    "family" or "family:qualifier" entries; null/empty means
 *                   all columns
 * @param attributes Thrift attributes copied onto the Scan
 * @return id assigned by {@code addScanner}
 * @throws IOError wrapping any underlying IOException
 */
@Override
public int scannerOpen(ByteBuffer tableName, ByteBuffer startRow,
    List<ByteBuffer> columns,
    Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  Table table = null;
  try {
    table = getTable(tableName);
    Scan scan = new Scan(getBytes(startRow));
    addAttributes(scan, attributes);
    // isEmpty() instead of size() != 0 — idiomatic and consistent with the
    // other scannerOpen* overloads.
    if (columns != null && !columns.isEmpty()) {
      for (ByteBuffer column : columns) {
        byte [][] famQf = CellUtil.parseColumn(getBytes(column));
        if (famQf.length == 1) {
          scan.addFamily(famQf[0]);
        } else {
          scan.addColumn(famQf[0], famQf[1]);
        }
      }
    }
    return addScanner(table.getScanner(scan), false);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw getIOError(e);
  } finally {
    closeTable(table);
  }
}
origin: com.aliyun.hbase/alihbase-thrift

/**
 * Opens a scanner starting at {@code startRow}, restricted to cells with
 * timestamps in [0, timestamp), and returns its scanner id.
 *
 * @param tableName  table to scan
 * @param startRow   first row the scan covers
 * @param columns    "family" or "family:qualifier" entries; null/empty means
 *                   all columns
 * @param timestamp  exclusive upper bound on cell timestamps
 * @param attributes Thrift attributes copied onto the Scan
 * @return id assigned by {@code addScanner}
 * @throws IOError wrapping any underlying IOException
 */
@Override
public int scannerOpenTs(ByteBuffer tableName, ByteBuffer startRow,
    List<ByteBuffer> columns, long timestamp,
    Map<ByteBuffer, ByteBuffer> attributes) throws IOError, TException {
  Table table = null;
  try {
    table = getTable(tableName);
    Scan scan = new Scan(getBytes(startRow));
    addAttributes(scan, attributes);
    scan.setTimeRange(0, timestamp);
    // isEmpty() instead of size() != 0 — idiomatic and consistent with the
    // other scannerOpen* overloads.
    if (columns != null && !columns.isEmpty()) {
      for (ByteBuffer column : columns) {
        byte [][] famQf = CellUtil.parseColumn(getBytes(column));
        if (famQf.length == 1) {
          scan.addFamily(famQf[0]);
        } else {
          scan.addColumn(famQf[0], famQf[1]);
        }
      }
    }
    return addScanner(table.getScanner(scan), false);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw getIOError(e);
  } finally {
    closeTable(table);
  }
}
origin: org.apache.hbase/hbase-thrift

// Opens a time-bounded scanner (cell timestamps in [0, timestamp)) from
// startRow over the requested columns; returns the registered scanner id.
@Override
public int scannerOpenTs(ByteBuffer tableName, ByteBuffer startRow,
    List<ByteBuffer> columns, long timestamp,
    Map<ByteBuffer, ByteBuffer> attributes) throws IOError, TException {
  Table handle = null;
  try {
    handle = getTable(tableName);
    Scan timeBoundedScan = new Scan(getBytes(startRow));
    addAttributes(timeBoundedScan, attributes);
    timeBoundedScan.setTimeRange(0, timestamp);
    boolean hasColumnFilter = columns != null && !columns.isEmpty();
    if (hasColumnFilter) {
      for (ByteBuffer col : columns) {
        byte [][] parts = CellUtil.parseColumn(getBytes(col));
        if (parts.length != 1) {
          timeBoundedScan.addColumn(parts[0], parts[1]);
        } else {
          timeBoundedScan.addFamily(parts[0]);
        }
      }
    }
    return addScanner(handle.getScanner(timeBoundedScan), false);
  } catch (IOException ioe) {
    LOG.warn(ioe.getMessage(), ioe);
    throw getIOError(ioe);
  } finally {
    closeTable(handle);
  }
}
origin: org.apache.hbase/hbase-thrift

/**
 * Opens a scanner over [startRow, stopRow) restricted to cell timestamps in
 * [0, timestamp) and returns its scanner id.
 *
 * <p>Fix: the original called {@code scan.setTimeRange(0, timestamp)} twice
 * (once before and once after the column loop); the redundant second call is
 * removed.
 *
 * @param tableName  table to scan
 * @param startRow   inclusive start row
 * @param stopRow    exclusive stop row
 * @param columns    "family" or "family:qualifier" entries; null/empty means
 *                   all columns
 * @param timestamp  exclusive upper bound on cell timestamps
 * @param attributes Thrift attributes copied onto the Scan
 * @return id assigned by {@code addScanner}
 * @throws IOError wrapping any underlying IOException
 */
@Override
public int scannerOpenWithStopTs(ByteBuffer tableName, ByteBuffer startRow,
    ByteBuffer stopRow, List<ByteBuffer> columns, long timestamp,
    Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError, TException {
  Table table = null;
  try {
    table = getTable(tableName);
    Scan scan = new Scan(getBytes(startRow), getBytes(stopRow));
    addAttributes(scan, attributes);
    scan.setTimeRange(0, timestamp);
    if (columns != null && !columns.isEmpty()) {
      for (ByteBuffer column : columns) {
        byte [][] famQf = CellUtil.parseColumn(getBytes(column));
        if (famQf.length == 1) {
          scan.addFamily(famQf[0]);
        } else {
          scan.addColumn(famQf[0], famQf[1]);
        }
      }
    }
    return addScanner(table.getScanner(scan), false);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw getIOError(e);
  } finally{
    closeTable(table);
  }
}
origin: com.aliyun.hbase/alihbase-thrift

// Thrift append entry point: validates the request, applies the append, and
// converts the resulting row's cells back into Thrift TCells.
@Override
public List<TCell> append(TAppend tappend) throws IOError, TException {
  boolean missingKey =
      tappend.getRow().length == 0 || tappend.getTable().length == 0;
  if (missingKey) {
    throw new TException("Must supply a table and a row key; can't append");
  }
  Table handle = null;
  try {
    handle = getTable(tappend.getTable());
    Append op = ThriftUtilities.appendFromThrift(tappend);
    Result outcome = handle.append(op);
    return ThriftUtilities.cellFromHBase(outcome.rawCells());
  } catch (IOException ioe) {
    LOG.warn(ioe.getMessage(), ioe);
    throw getIOError(ioe);
  } finally {
    closeTable(handle);
  }
}
origin: com.aliyun.hbase/alihbase-thrift

 throw getIOError(e);
} finally{
 closeTable(table);
origin: org.apache.hbase/hbase-thrift

 throw getIOError(e);
} finally{
 closeTable(table);
origin: org.apache.hbase/hbase-thrift

// Increments one cell by {@code amount} via Table.incrementColumnValue and
// returns the post-increment value.
protected long atomicIncrement(ByteBuffer tableName, ByteBuffer row,
    byte [] family, byte [] qualifier, long amount)
    throws IOError, IllegalArgument, TException {
  Table handle = null;
  try {
    handle = getTable(tableName);
    byte[] rowKey = getBytes(row);
    return handle.incrementColumnValue(rowKey, family, qualifier, amount);
  } catch (IOException ioe) {
    LOG.warn(ioe.getMessage(), ioe);
    throw getIOError(ioe);
  } finally {
    closeTable(handle);
  }
}
origin: org.apache.hbase/hbase-thrift

 throw getIOError(e);
} finally {
 closeTable(table);
origin: org.apache.hbase/hbase-thrift

 throw getIOError(e);
} finally{
 closeTable(table);
origin: com.aliyun.hbase/alihbase-thrift

// Deletes either a full column family ("family") or a single column
// ("family:qualifier") from the given row, bounded by {@code timestamp}.
@Override
public void deleteAllTs(ByteBuffer tableName,
            ByteBuffer row,
            ByteBuffer column,
  long timestamp, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  Table handle = null;
  try {
    handle = getTable(tableName);
    final Delete op = new Delete(getBytes(row));
    addAttributes(op, attributes);
    final byte [][] parts = CellUtil.parseColumn(getBytes(column));
    if (parts.length == 1) {
      op.addFamily(parts[0], timestamp);
    } else {
      op.addColumns(parts[0], parts[1], timestamp);
    }
    handle.delete(op);
  } catch (IOException ioe) {
    LOG.warn(ioe.getMessage(), ioe);
    throw getIOError(ioe);
  } finally {
    closeTable(handle);
  }
}
org.apache.hadoop.hbase.thriftThriftServerRunner$HBaseHandlercloseTable

Popular methods of ThriftServerRunner$HBaseHandler

  • <init>
  • deleteAllRowTs
  • deleteAllTs
  • get
    Note: this internal interface is slightly different from public APIs in regard to handling of the qu
  • getRowWithColumnsTs
  • getVerTs
    Note: this internal interface is slightly different from public APIs in regard to handling of the qu
  • increment
  • mutateRowTs
  • mutateRowsTs
  • addScanner
    Assigns a unique ID to the scanner and adds the mapping to an internal hash-map.
  • atomicIncrement
  • getRowsWithColumnsTs
  • atomicIncrement,
  • getRowsWithColumnsTs,
  • getScanner,
  • getTable,
  • getVer,
  • removeScanner,
  • scannerGetList,
  • append,
  • checkAndPut

Popular in Java

  • Making http requests using okhttp
  • getOriginalFilename (MultipartFile)
    Return the original filename in the client's filesystem.This may contain path information depending
  • onRequestPermissionsResult (Fragment)
  • getSupportFragmentManager (FragmentActivity)
    Return the FragmentManager for interacting with fragments associated with this activity.
  • ObjectMapper (com.fasterxml.jackson.databind)
    This mapper (or, data binder, or codec) provides functionality for converting between Java objects (
  • CountDownLatch (java.util.concurrent)
    A synchronization aid that allows one or more threads to wait until a set of operations being perfor
  • ThreadPoolExecutor (java.util.concurrent)
    An ExecutorService that executes each submitted task using one of possibly several pooled threads, n
  • Cipher (javax.crypto)
    This class provides access to implementations of cryptographic ciphers for encryption and decryption
  • Table (org.hibernate.mapping)
    A relational table
  • Logger (org.slf4j)
    The main user interface to logging. It is expected that logging takes place through concrete impleme
Codota Logo
  • Products

    Search for Java codeSearch for JavaScript codeEnterprise
  • IDE Plugins

    IntelliJ IDEAWebStormAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimAtomGoLandRubyMineEmacsJupyter
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogCodota Academy Plugin user guide Terms of usePrivacy policyJava Code IndexJavascript Code Index
Get Codota for your IDE now