/**
 * Fetches slices for multiple keys in one thrift multiget_slice round trip.
 * When no explicit predicate is supplied, the active predicate is narrowed to
 * the mapped column names before conversion to its thrift form.
 */
private ExecutionResult<Map<ByteBuffer, List<ColumnOrSuperColumn>>> multigetSliceInternal(final Iterable<K> keys, final HSlicePredicate<N> workingSlicePredicate) {
  return ((ExecutingKeyspace) keyspace).doExecuteOperation(new Operation<Map<ByteBuffer, List<ColumnOrSuperColumn>>>(OperationType.READ) {
    @Override
    public Map<ByteBuffer, List<ColumnOrSuperColumn>> execute(Cassandra.Client cassandra) throws HectorException {
      try {
        // Materialize the Iterable so the key serializer can operate on a List.
        List<K> materializedKeys = new ArrayList<K>();
        Iterators.addAll(materializedKeys, keys.iterator());
        return cassandra.multiget_slice(
            keySerializer.toBytesList(materializedKeys),
            columnParent,
            (workingSlicePredicate == null
                ? activeSlicePredicate.setColumnNames(columnValueSerializers.keySet()).toThrift()
                : workingSlicePredicate.toThrift()),
            ThriftConverter.consistencyLevel(consistencyLevelPolicy.get(operationType, columnFamily)));
      } catch (Exception e) {
        throw exceptionsTranslator.translate(e);
      }
    }
  });
}
/**
 * Fetches a single row's super-column slice via thrift get_slice and wraps it
 * in a one-entry, insertion-ordered map keyed by the serialized row key.
 */
private ExecutionResult<Map<ByteBuffer, List<ColumnOrSuperColumn>>> sliceInternal(final K key, final HSlicePredicate<SN> workingSlicePredicate) {
  return ((ExecutingKeyspace) keyspace).doExecuteOperation(new Operation<Map<ByteBuffer, List<ColumnOrSuperColumn>>>(OperationType.READ) {
    @Override
    public Map<ByteBuffer, List<ColumnOrSuperColumn>> execute(Cassandra.Client cassandra) throws HectorException {
      Map<ByteBuffer, List<ColumnOrSuperColumn>> result = new LinkedHashMap<ByteBuffer, List<ColumnOrSuperColumn>>();
      try {
        ByteBuffer serializedKey = keySerializer.toByteBuffer(key);
        List<ColumnOrSuperColumn> slice = cassandra.get_slice(
            serializedKey,
            columnParent,
            workingSlicePredicate.toThrift(),
            ThriftConverter.consistencyLevel(consistencyLevelPolicy.get(operationType)));
        result.put(serializedKey, slice);
      } catch (Exception e) {
        throw exceptionsTranslator.translate(e);
      }
      return result;
    }
  });
}
/**
 * Fetches slices for the given keys against an explicit column parent via
 * thrift multiget_slice, translating any failure into a HectorException.
 */
private ExecutionResult<Map<ByteBuffer, List<ColumnOrSuperColumn>>> multigetSliceInternal(final List<K> keys, final ColumnParent workingColumnParent, final HSlicePredicate<SN> workingSlicePredicate) {
  return ((ExecutingKeyspace) keyspace).doExecuteOperation(new Operation<Map<ByteBuffer, List<ColumnOrSuperColumn>>>(OperationType.READ) {
    @Override
    public Map<ByteBuffer, List<ColumnOrSuperColumn>> execute(Cassandra.Client cassandra) throws HectorException {
      try {
        List<ByteBuffer> serializedKeys = keySerializer.toBytesList(keys);
        return cassandra.multiget_slice(
            serializedKeys,
            workingColumnParent,
            workingSlicePredicate.toThrift(),
            ThriftConverter.consistencyLevel(consistencyLevelPolicy.get(operationType)));
      } catch (Exception e) {
        throw exceptionsTranslator.translate(e);
      }
    }
  });
}
/**
 * Fetches a single row's column slice via thrift get_slice, using the
 * column-family-specific consistency level, and returns it wrapped in a
 * one-entry, insertion-ordered map keyed by the serialized row key.
 */
private ExecutionResult<Map<ByteBuffer, List<ColumnOrSuperColumn>>> sliceInternal(final K key, final HSlicePredicate<N> workingSlicePredicate) {
  return ((ExecutingKeyspace) keyspace).doExecuteOperation(new Operation<Map<ByteBuffer, List<ColumnOrSuperColumn>>>(OperationType.READ) {
    @Override
    public Map<ByteBuffer, List<ColumnOrSuperColumn>> execute(Cassandra.Client cassandra) throws HectorException {
      Map<ByteBuffer, List<ColumnOrSuperColumn>> result = new LinkedHashMap<ByteBuffer, List<ColumnOrSuperColumn>>();
      try {
        ByteBuffer serializedKey = keySerializer.toByteBuffer(key);
        List<ColumnOrSuperColumn> slice = cassandra.get_slice(
            serializedKey,
            columnParent,
            workingSlicePredicate.toThrift(),
            ThriftConverter.consistencyLevel(consistencyLevelPolicy.get(operationType, columnFamily)));
        result.put(serializedKey, slice);
      } catch (Exception e) {
        throw exceptionsTranslator.translate(e);
      }
      return result;
    }
  });
}
/**
 * Executes a get_slice for the first key in {@code _keys} with the active
 * predicate, records timing/host stats, and feeds the resulting columns into
 * the row model via applyToRow. Sets {@code hasValues} on success.
 */
private void doExecuteSlice() {
  keyspace.doExecuteOperation(new Operation<Column>(OperationType.READ) {
    @Override
    public Column execute(Cassandra.Client cassandra) throws HectorException {
      try {
        if ( queryLogger.isDebugEnabled() ) {
          queryLogger.debug("---------\nColumnFamily: {} slicePredicate: {}", columnFamilyName, activeSlicePredicate.toString());
        }
        K firstKey = _keys.get(0);
        List<ColumnOrSuperColumn> sliceColumns = cassandra.get_slice(
            keySerializer.toByteBuffer(firstKey),
            columnParent,
            activeSlicePredicate.toThrift(),
            ThriftConverter.consistencyLevel(consistencyLevelPolicy.get(operationType)));
        applyResultStatus(execTime, getCassandraHost());
        applyToRow(firstKey, sliceColumns);
        if ( queryLogger.isDebugEnabled() ) {
          queryLogger.debug("Execution took {} microseconds on host {}\n----------", lastExecutionTime, lastHostUsed);
        }
      } catch (Exception e) {
        throw exceptionsTranslator.translate(e);
      }
      hasValues = true;
      return null;
    }
  });
}
/**
 * Runs a secondary-index query (thrift get_indexed_slices) and re-shapes the
 * returned key slices into an insertion-ordered map of raw row key to columns.
 * Returns an empty map when the server yields no slices.
 */
private <V> ExecutionResult<Map<ByteBuffer, List<ColumnOrSuperColumn>>> indexedSlicesInternal(final IndexedSlicesPredicate<K, N, V> predicate, final HSlicePredicate<N> workingSlicePredicate) {
  return ((ExecutingKeyspace) keyspace).doExecuteOperation(new Operation<Map<ByteBuffer, List<ColumnOrSuperColumn>>>(OperationType.READ) {
    @Override
    public Map<ByteBuffer, List<ColumnOrSuperColumn>> execute(Client cassandra) throws HectorException {
      try {
        List<KeySlice> keySlices = cassandra.get_indexed_slices(
            columnParent,
            predicate.toThrift(),
            workingSlicePredicate.toThrift(),
            ThriftConverter.consistencyLevel(consistencyLevelPolicy.get(operationType, columnFamily)));
        if (keySlices == null || keySlices.isEmpty()) {
          return new LinkedHashMap<ByteBuffer, List<ColumnOrSuperColumn>>(0);
        }
        // Presized and insertion-ordered so rows come back in server order.
        LinkedHashMap<ByteBuffer, List<ColumnOrSuperColumn>> rowsByKey =
            new LinkedHashMap<ByteBuffer, List<ColumnOrSuperColumn>>(keySlices.size());
        for (KeySlice slice : keySlices) {
          rowsByKey.put(ByteBuffer.wrap(slice.getKey()), slice.getColumns());
        }
        return rowsByKey;
      } catch (Exception e) {
        throw exceptionsTranslator.translate(e);
      }
    }
  });
}
}
// Executes a thrift multiget_slice for all of _keys with the active predicate,
// storing the raw result map into the `rows` field inside the operation, then —
// after the operation completes — applies only the FIRST key's row via applyToRow.
// NOTE(review): rows.get(toByteBuffer(_keys.get(0))) may be null if the server
// returned no entry for that key; applyToRow would then receive a null list —
// confirm applyToRow tolerates null before relying on this path.
private void doExecuteMultigetSlice() { keyspace.doExecuteOperation(new Operation<Column>(OperationType.READ) { @Override public Column execute(Cassandra.Client cassandra) throws HectorException { try { if ( queryLogger.isDebugEnabled() ) { queryLogger.debug("---------\nColumnFamily multiget: {} slicePredicate: {}", columnFamilyName, activeSlicePredicate.toString()); } rows = cassandra.multiget_slice(keySerializer.toBytesList(_keys), columnParent, activeSlicePredicate.toThrift(), ThriftConverter.consistencyLevel(consistencyLevelPolicy.get(operationType))); applyResultStatus(execTime, getCassandraHost()); if ( queryLogger.isDebugEnabled() ) { queryLogger.debug("Execution took {} microseconds on host {}\n----------", lastExecutionTime, lastHostUsed); } } catch (Exception e) { throw exceptionsTranslator.translate(e); } hasValues = true; return null; } }); applyToRow(_keys.get(0), rows.get(keySerializer.toByteBuffer(_keys.get(0)))); }
// NOTE(review): opens a CQL READ operation; the anonymous Operation body
// continues beyond this line (the definition is truncated in this view).
keyspace.doExecuteOperation(new Operation<CqlRows<K, N, V>>(OperationType.READ) {
/**
 * Batch executes all mutations scheduled to this Mutator instance by
 * addInsertion, addDeletion etc. May throw a HectorException which is a
 * RuntimeException.
 *
 * @return A MutationResult holds the status.
 */
@Override
public MutationResult execute() {
  if (pendingMutations == null || pendingMutations.isEmpty()) {
    // Nothing queued: report an immediate, empty success.
    return new MutationResultImpl(true, 0, null);
  }
  // Snapshot and clear the pending batch before sending so this Mutator can
  // be reused for new mutations right away.
  final BatchMutation<K> mutations = pendingMutations.makeCopy();
  pendingMutations = null;
  return new MutationResultImpl(keyspace.doExecuteOperation(new Operation<Void>(OperationType.WRITE) {
    @Override
    public Void execute(Cassandra.Client cassandra) throws Exception {
      cassandra.batch_mutate(
          mutations.getMutationMap(),
          ThriftConverter.consistencyLevel(consistencyLevelPolicy.get(operationType)));
      return null;
    }
  }));
}
/**
 * Fetches a single row's column slice via thrift get_slice, using the
 * column-family-specific consistency level, and returns it wrapped in a
 * one-entry, insertion-ordered map keyed by the serialized row key.
 */
private ExecutionResult<Map<ByteBuffer, List<ColumnOrSuperColumn>>> sliceInternal(final K key, final HSlicePredicate<N> workingSlicePredicate) {
  return ((ExecutingKeyspace) keyspace).doExecuteOperation(new Operation<Map<ByteBuffer, List<ColumnOrSuperColumn>>>(OperationType.READ) {
    @Override
    public Map<ByteBuffer, List<ColumnOrSuperColumn>> execute(Cassandra.Client cassandra) throws HectorException {
      Map<ByteBuffer, List<ColumnOrSuperColumn>> result = new LinkedHashMap<ByteBuffer, List<ColumnOrSuperColumn>>();
      try {
        ByteBuffer serializedKey = keySerializer.toByteBuffer(key);
        List<ColumnOrSuperColumn> slice = cassandra.get_slice(
            serializedKey,
            columnParent,
            workingSlicePredicate.toThrift(),
            ThriftConverter.consistencyLevel(consistencyLevelPolicy.get(operationType, columnFamily)));
        result.put(serializedKey, slice);
      } catch (Exception e) {
        throw exceptionsTranslator.translate(e);
      }
      return result;
    }
  });
}
/**
 * Fetches a single row's super-column slice via thrift get_slice and wraps it
 * in a one-entry, insertion-ordered map keyed by the serialized row key.
 */
private ExecutionResult<Map<ByteBuffer, List<ColumnOrSuperColumn>>> sliceInternal(final K key, final HSlicePredicate<SN> workingSlicePredicate) {
  return ((ExecutingKeyspace) keyspace).doExecuteOperation(new Operation<Map<ByteBuffer, List<ColumnOrSuperColumn>>>(OperationType.READ) {
    @Override
    public Map<ByteBuffer, List<ColumnOrSuperColumn>> execute(Cassandra.Client cassandra) throws HectorException {
      Map<ByteBuffer, List<ColumnOrSuperColumn>> result = new LinkedHashMap<ByteBuffer, List<ColumnOrSuperColumn>>();
      try {
        ByteBuffer serializedKey = keySerializer.toByteBuffer(key);
        List<ColumnOrSuperColumn> slice = cassandra.get_slice(
            serializedKey,
            columnParent,
            workingSlicePredicate.toThrift(),
            ThriftConverter.consistencyLevel(consistencyLevelPolicy.get(operationType)));
        result.put(serializedKey, slice);
      } catch (Exception e) {
        throw exceptionsTranslator.translate(e);
      }
      return result;
    }
  });
}
/**
 * Fetches slices for multiple keys in one thrift multiget_slice round trip.
 * When no explicit predicate is supplied, the active predicate is narrowed to
 * the mapped column names before conversion to its thrift form.
 */
private ExecutionResult<Map<ByteBuffer, List<ColumnOrSuperColumn>>> multigetSliceInternal(final Iterable<K> keys, final HSlicePredicate<N> workingSlicePredicate) {
  return ((ExecutingKeyspace) keyspace).doExecuteOperation(new Operation<Map<ByteBuffer, List<ColumnOrSuperColumn>>>(OperationType.READ) {
    @Override
    public Map<ByteBuffer, List<ColumnOrSuperColumn>> execute(Cassandra.Client cassandra) throws HectorException {
      try {
        // Materialize the Iterable so the key serializer can operate on a List.
        List<K> materializedKeys = new ArrayList<K>();
        Iterators.addAll(materializedKeys, keys.iterator());
        return cassandra.multiget_slice(
            keySerializer.toBytesList(materializedKeys),
            columnParent,
            (workingSlicePredicate == null
                ? activeSlicePredicate.setColumnNames(columnValueSerializers.keySet()).toThrift()
                : workingSlicePredicate.toThrift()),
            ThriftConverter.consistencyLevel(consistencyLevelPolicy.get(operationType, columnFamily)));
      } catch (Exception e) {
        throw exceptionsTranslator.translate(e);
      }
    }
  });
}
/**
 * Fetches slices for multiple keys in one thrift multiget_slice round trip.
 * When no explicit predicate is supplied, the active predicate is narrowed to
 * the mapped column names before conversion to its thrift form.
 */
private ExecutionResult<Map<ByteBuffer, List<ColumnOrSuperColumn>>> multigetSliceInternal(final Iterable<K> keys, final HSlicePredicate<N> workingSlicePredicate) {
  return ((ExecutingKeyspace) keyspace).doExecuteOperation(new Operation<Map<ByteBuffer, List<ColumnOrSuperColumn>>>(OperationType.READ) {
    @Override
    public Map<ByteBuffer, List<ColumnOrSuperColumn>> execute(Cassandra.Client cassandra) throws HectorException {
      try {
        // Materialize the Iterable so the key serializer can operate on a List.
        List<K> materializedKeys = new ArrayList<K>();
        Iterators.addAll(materializedKeys, keys.iterator());
        return cassandra.multiget_slice(
            keySerializer.toBytesList(materializedKeys),
            columnParent,
            (workingSlicePredicate == null
                ? activeSlicePredicate.setColumnNames(columnValueSerializers.keySet()).toThrift()
                : workingSlicePredicate.toThrift()),
            ThriftConverter.consistencyLevel(consistencyLevelPolicy.get(operationType, columnFamily)));
      } catch (Exception e) {
        throw exceptionsTranslator.translate(e);
      }
    }
  });
}
/**
 * Fetches a single row's column slice via thrift get_slice, using the
 * column-family-specific consistency level, and returns it wrapped in a
 * one-entry, insertion-ordered map keyed by the serialized row key.
 */
private ExecutionResult<Map<ByteBuffer, List<ColumnOrSuperColumn>>> sliceInternal(final K key, final HSlicePredicate<N> workingSlicePredicate) {
  return ((ExecutingKeyspace) keyspace).doExecuteOperation(new Operation<Map<ByteBuffer, List<ColumnOrSuperColumn>>>(OperationType.READ) {
    @Override
    public Map<ByteBuffer, List<ColumnOrSuperColumn>> execute(Cassandra.Client cassandra) throws HectorException {
      Map<ByteBuffer, List<ColumnOrSuperColumn>> result = new LinkedHashMap<ByteBuffer, List<ColumnOrSuperColumn>>();
      try {
        ByteBuffer serializedKey = keySerializer.toByteBuffer(key);
        List<ColumnOrSuperColumn> slice = cassandra.get_slice(
            serializedKey,
            columnParent,
            workingSlicePredicate.toThrift(),
            ThriftConverter.consistencyLevel(consistencyLevelPolicy.get(operationType, columnFamily)));
        result.put(serializedKey, slice);
      } catch (Exception e) {
        throw exceptionsTranslator.translate(e);
      }
      return result;
    }
  });
}
/**
 * Fetches a single row's super-column slice via thrift get_slice and wraps it
 * in a one-entry, insertion-ordered map keyed by the serialized row key.
 */
private ExecutionResult<Map<ByteBuffer, List<ColumnOrSuperColumn>>> sliceInternal(final K key, final HSlicePredicate<SN> workingSlicePredicate) {
  return ((ExecutingKeyspace) keyspace).doExecuteOperation(new Operation<Map<ByteBuffer, List<ColumnOrSuperColumn>>>(OperationType.READ) {
    @Override
    public Map<ByteBuffer, List<ColumnOrSuperColumn>> execute(Cassandra.Client cassandra) throws HectorException {
      Map<ByteBuffer, List<ColumnOrSuperColumn>> result = new LinkedHashMap<ByteBuffer, List<ColumnOrSuperColumn>>();
      try {
        ByteBuffer serializedKey = keySerializer.toByteBuffer(key);
        List<ColumnOrSuperColumn> slice = cassandra.get_slice(
            serializedKey,
            columnParent,
            workingSlicePredicate.toThrift(),
            ThriftConverter.consistencyLevel(consistencyLevelPolicy.get(operationType)));
        result.put(serializedKey, slice);
      } catch (Exception e) {
        throw exceptionsTranslator.translate(e);
      }
      return result;
    }
  });
}
/**
 * Fetches slices for the given keys against an explicit column parent via
 * thrift multiget_slice, translating any failure into a HectorException.
 */
private ExecutionResult<Map<ByteBuffer, List<ColumnOrSuperColumn>>> multigetSliceInternal(final List<K> keys, final ColumnParent workingColumnParent, final HSlicePredicate<SN> workingSlicePredicate) {
  return ((ExecutingKeyspace) keyspace).doExecuteOperation(new Operation<Map<ByteBuffer, List<ColumnOrSuperColumn>>>(OperationType.READ) {
    @Override
    public Map<ByteBuffer, List<ColumnOrSuperColumn>> execute(Cassandra.Client cassandra) throws HectorException {
      try {
        List<ByteBuffer> serializedKeys = keySerializer.toBytesList(keys);
        return cassandra.multiget_slice(
            serializedKeys,
            workingColumnParent,
            workingSlicePredicate.toThrift(),
            ThriftConverter.consistencyLevel(consistencyLevelPolicy.get(operationType)));
      } catch (Exception e) {
        throw exceptionsTranslator.translate(e);
      }
    }
  });
}
/**
 * Fetches slices for the given keys against an explicit column parent via
 * thrift multiget_slice, translating any failure into a HectorException.
 */
private ExecutionResult<Map<ByteBuffer, List<ColumnOrSuperColumn>>> multigetSliceInternal(final List<K> keys, final ColumnParent workingColumnParent, final HSlicePredicate<SN> workingSlicePredicate) {
  return ((ExecutingKeyspace) keyspace).doExecuteOperation(new Operation<Map<ByteBuffer, List<ColumnOrSuperColumn>>>(OperationType.READ) {
    @Override
    public Map<ByteBuffer, List<ColumnOrSuperColumn>> execute(Cassandra.Client cassandra) throws HectorException {
      try {
        List<ByteBuffer> serializedKeys = keySerializer.toBytesList(keys);
        return cassandra.multiget_slice(
            serializedKeys,
            workingColumnParent,
            workingSlicePredicate.toThrift(),
            ThriftConverter.consistencyLevel(consistencyLevelPolicy.get(operationType)));
      } catch (Exception e) {
        throw exceptionsTranslator.translate(e);
      }
    }
  });
}
/**
 * Executes a get_slice for the first key in {@code _keys} with the active
 * predicate, records timing/host stats, and feeds the resulting columns into
 * the row model via applyToRow. Sets {@code hasValues} on success.
 */
private void doExecuteSlice() {
  keyspace.doExecuteOperation(new Operation<Column>(OperationType.READ) {
    @Override
    public Column execute(Cassandra.Client cassandra) throws HectorException {
      try {
        if ( queryLogger.isDebugEnabled() ) {
          queryLogger.debug("---------\nColumnFamily: {} slicePredicate: {}", columnFamilyName, activeSlicePredicate.toString());
        }
        K firstKey = _keys.get(0);
        List<ColumnOrSuperColumn> sliceColumns = cassandra.get_slice(
            keySerializer.toByteBuffer(firstKey),
            columnParent,
            activeSlicePredicate.toThrift(),
            ThriftConverter.consistencyLevel(consistencyLevelPolicy.get(operationType)));
        applyResultStatus(execTime, getCassandraHost());
        applyToRow(firstKey, sliceColumns);
        if ( queryLogger.isDebugEnabled() ) {
          queryLogger.debug("Execution took {} microseconds on host {}\n----------", lastExecutionTime, lastHostUsed);
        }
      } catch (Exception e) {
        throw exceptionsTranslator.translate(e);
      }
      hasValues = true;
      return null;
    }
  });
}
/**
 * Batch executes all mutations scheduled to this Mutator instance by
 * addInsertion, addDeletion etc. May throw a HectorException which is a
 * RuntimeException.
 *
 * @return A MutationResult holds the status.
 */
@Override
public MutationResult execute() {
  if (pendingMutations == null || pendingMutations.isEmpty()) {
    // Nothing queued: report an immediate, empty success.
    return new MutationResultImpl(true, 0, null);
  }
  // Snapshot and clear the pending batch before sending so this Mutator can
  // be reused for new mutations right away.
  final BatchMutation<K> mutations = pendingMutations.makeCopy();
  pendingMutations = null;
  return new MutationResultImpl(keyspace.doExecuteOperation(new Operation<Void>(OperationType.WRITE) {
    @Override
    public Void execute(Cassandra.Client cassandra) throws Exception {
      cassandra.batch_mutate(
          mutations.getMutationMap(),
          ThriftConverter.consistencyLevel(consistencyLevelPolicy.get(operationType)));
      return null;
    }
  }));
}
/**
 * Batch executes all mutations scheduled to this Mutator instance by
 * addInsertion, addDeletion etc. May throw a HectorException which is a
 * RuntimeException.
 *
 * @return A MutationResult holds the status.
 */
@Override
public MutationResult execute() {
  if (pendingMutations == null || pendingMutations.isEmpty()) {
    // Nothing queued: report an immediate, empty success.
    return new MutationResultImpl(true, 0, null);
  }
  // Snapshot and clear the pending batch before sending so this Mutator can
  // be reused for new mutations right away.
  final BatchMutation<K> mutations = pendingMutations.makeCopy();
  pendingMutations = null;
  return new MutationResultImpl(keyspace.doExecuteOperation(new Operation<Void>(OperationType.WRITE) {
    @Override
    public Void execute(Cassandra.Client cassandra) throws Exception {
      cassandra.batch_mutate(
          mutations.getMutationMap(),
          ThriftConverter.consistencyLevel(consistencyLevelPolicy.get(operationType)));
      return null;
    }
  }));
}