Code search results: Cassandra Thrift KeyRange usage across projects (refine search to narrow).
/**
 * Fetches rows whose keys fall between {@code startKey} and {@code endKey}
 * (Thrift key-range semantics), returning at most {@code count} rows.
 *
 * @param startKey    first key of the range
 * @param endKey      last key of the range
 * @param columnSlice column slice applied to every returned row
 * @param count       maximum number of rows to fetch
 * @throws BackendException if the underlying range-slice call fails
 */
private List<KeySlice> getKeySlice(ByteBuffer startKey, ByteBuffer endKey, SliceQuery columnSlice, int count) throws BackendException {
    org.apache.cassandra.thrift.KeyRange range = new org.apache.cassandra.thrift.KeyRange();
    range.setStart_key(startKey);
    range.setEnd_key(endKey);
    range.setCount(count);
    return getRangeSlices(range, columnSlice);
}
/**
 * Fetches rows in the token range between {@code startToken} and {@code endToken},
 * returning at most {@code count} rows. Both tokens are passed through
 * {@code sanitizeBrokenByteToken} before being handed to Thrift.
 *
 * @throws BackendException if the underlying range-slice call fails
 */
private <T extends Token> List<KeySlice> getTokenSlice(T startToken, T endToken, SliceQuery sliceQuery, int count) throws BackendException {
    return getRangeSlices(
            new org.apache.cassandra.thrift.KeyRange()
                    .setStart_token(sanitizeBrokenByteToken(startToken))
                    .setEnd_token(sanitizeBrokenByteToken(endToken))
                    .setCount(count),
            sliceQuery);
}
private KeyRange getKeyRange(byte[] startKey, byte[] endExclusive) { KeyRange keyRange = new KeyRange(batchHint); keyRange.setStart_key(startKey); if (endExclusive.length == 0) { keyRange.setEnd_key(endExclusive); } else { // We need the previous name because this is inclusive, not exclusive keyRange.setEnd_key(RangeRequests.previousLexicographicName(endExclusive)); } return keyRange; } }
// NOTE(review): collapsed excerpt — these statements appear to come from more than one
// method (two clashing declarations of `keySlices`, and a for-loop whose body is a bare
// local declaration, which is not valid Java). Kept verbatim; do not compile as-is.
// Predicate naming explicit columns, then also given an open (unbounded) slice range.
SlicePredicate slicePredicate = new SlicePredicate();
slicePredicate.setColumn_names(asList);
sliceRange.setStart(ByteBufferUtil.EMPTY_BYTE_BUFFER);
sliceRange.setFinish(ByteBufferUtil.EMPTY_BYTE_BUFFER);
slicePredicate.setSlice_range(sliceRange);
// Unbounded key range (empty start/end keys) capped at maxResult rows.
KeyRange keyRange = new KeyRange(maxResult);
keyRange.setStart_key(ByteBufferUtil.EMPTY_BYTE_BUFFER);
keyRange.setEnd_key(ByteBufferUtil.EMPTY_BYTE_BUFFER);
List<KeySlice> ks = conn.getClient().get_range_slices(new ColumnParent(m.getTableName()), slicePredicate, keyRange, getConsistencyLevel());
entities = onCounterColumn(m, isRelation, relations, ks);
List<KeySlice> keySlices = conn.getClient().get_range_slices(new ColumnParent(m.getTableName()), slicePredicate, keyRange, getConsistencyLevel());
// presumably from a separate secondary-index query path — TODO confirm against the original file
for (IndexClause ix : ixClause)
List<KeySlice> keySlices = conn.getClient().get_indexed_slices(new ColumnParent(m.getTableName()), ix, slicePredicate, getConsistencyLevel());
// NOTE(review): collapsed excerpt — braces/`else` around the two `predicate =` assignments
// appear to have been elided; as written the second assignment would clobber the first.
SlicePredicate predicate;
if (fields == null) {
// No field projection requested: open slice range fetching up to 1,000,000 columns.
predicate = new SlicePredicate().setSlice_range(new SliceRange( EMPTY_BYTE_BUFFER, EMPTY_BYTE_BUFFER, false, 1000000));
// presumably this belongs in an else branch (explicit column projection) — TODO confirm
predicate = new SlicePredicate().setColumn_names(fieldlist);
// Scan from startkey to the end of the ring (empty end key), at most recordcount rows.
KeyRange kr = new KeyRange().setStart_key(startkey.getBytes("UTF-8")) .setEnd_key(new byte[] {}).setCount(recordcount);
// NOTE(review): collapsed excerpt starting mid-signature; kept verbatim.
List<String> columns, List<IndexExpression> conditions, int maxResults) throws Exception
// Predicate naming explicit columns, then also given an open (unbounded) slice range.
SlicePredicate slicePredicate = new SlicePredicate();
slicePredicate.setColumn_names(asList);
sliceRange.setStart(ByteBufferUtil.EMPTY_BYTE_BUFFER);
sliceRange.setFinish(ByteBufferUtil.EMPTY_BYTE_BUFFER);
slicePredicate.setSlice_range(sliceRange);
// Key range bounded by minVal/maxVal (empty bytes = unbounded side), maxResults rows.
KeyRange keyRange = new KeyRange(maxResults);
keyRange.setStart_key(minVal == null ? "".getBytes() : minVal);
keyRange.setEnd_key(maxVal == null ? "".getBytes() : maxVal);
ColumnParent cp = new ColumnParent(m.getTableName());
// Secondary-index conditions are pushed down as a server-side row filter.
keyRange.setRow_filter(conditions);
keyRange.setRow_filterIsSet(true);
/**
 * Limits the job's input rows to a token range with a server-side row filter.
 *
 * @param conf Job configuration you are about to run
 * @param startToken first token of the range to scan
 * @param endToken last token of the range to scan
 * @param filter index expressions applied server-side to each candidate row
 */
public static void setInputRange(Configuration conf, String startToken, String endToken, List<IndexExpression> filter) {
    KeyRange range = new KeyRange();
    range.setStart_token(startToken);
    range.setEnd_token(endToken);
    range.setRow_filter(filter);
    conf.set(INPUT_KEYRANGE_CONFIG, thriftToString(range));
}
// NOTE(review): garbled excerpt of a Thrift-generated setFieldValue(_Fields, Object)
// switch — most `case` labels and closing braces were lost in extraction. Each surviving
// pair follows the generated pattern: a null value unsets the field, otherwise the
// value is cast to the field's declared type and set. Kept verbatim.
case START_KEY:
if (value == null) {
unsetStart_key();
} else {
setStart_key((ByteBuffer)value);
unsetEnd_key();
} else {
setEnd_key((ByteBuffer)value);
unsetStart_token();
} else {
setStart_token((String)value);
unsetEnd_token();
} else {
setEnd_token((String)value);
unsetRow_filter();
} else {
setRow_filter((List<IndexExpression>)value);
unsetCount();
} else {
setCount((Integer)value);
/**
 * Converts this range description into its Thrift {@code KeyRange} form.
 * A token bound, when present, takes precedence over the corresponding key bound;
 * a missing key bound is encoded as an empty ByteBuffer (Thrift's unbounded marker).
 *
 * @return The thrift representation of this object
 */
public KeyRange toThrift() {
    KeyRange thriftRange = new KeyRange(rowCount);
    if (startToken == null) {
        thriftRange.setStart_key(startKey == null ? ByteBuffer.wrap(new byte[0]) : keySerializer.toByteBuffer(startKey));
    } else {
        thriftRange.setStart_token(startToken);
    }
    if (endToken == null) {
        thriftRange.setEnd_key(endKey == null ? ByteBuffer.wrap(new byte[0]) : keySerializer.toByteBuffer(endKey));
    } else {
        thriftRange.setEnd_token(endToken);
    }
    if (rowFilters != null) {
        for (IndexExpression filter : rowFilters) {
            thriftRange.addToRow_filter(filter);
        }
    }
    return thriftRange;
}
// NOTE(review): collapsed excerpt starting mid-method; kept verbatim (the if-block is
// unclosed here and `startToken` is defined in elided lines).
// Default the end token to the partitioner's maximum when the query leaves it open.
String endToken = query.getEndToken() == null ? partitioner.getMaxToken() : query.getEndToken();
range = new KeyRange() .setCount(query.getBlockSize()) .setStart_token(startToken) .setEnd_token(endToken);
// presumably a zero-count slice range is a keys-only query where tombstoned rows must
// still be surfaced — TODO confirm against the original file
if (query.getPredicate().isSetSlice_range() && query.getPredicate().getSlice_range().getCount() == 0) {
bIgnoreTombstones = false;
/**
 * Returns a copy of {@code unprefixed} whose start and end keys have been run
 * through the serializer {@code ps}; the token bounds and row count are copied over
 * unchanged. Fields are assigned directly, mirroring the source object field-for-field.
 */
public KeyRange prefixKeyRange(KeyRange unprefixed) {
    KeyRange prefixed = new KeyRange();
    // Carry the unaffected fields across verbatim.
    prefixed.count = unprefixed.count;
    prefixed.start_token = unprefixed.start_token;
    // Only the key bounds are transformed; end first, then start.
    prefixed.end_key = ps.toByteBuffer(unprefixed.end_key);
    prefixed.end_token = unprefixed.end_token;
    prefixed.start_key = ps.toByteBuffer(unprefixed.start_key);
    return prefixed;
}
/**
 * Limits the job's input rows to the given token range.
 *
 * @param conf Job configuration you are about to run
 * @param startToken first token of the range to scan
 * @param endToken last token of the range to scan
 */
public static void setInputRange(Configuration conf, String startToken, String endToken) {
    KeyRange range = new KeyRange();
    range.setStart_token(startToken);
    range.setEnd_token(endToken);
    conf.set(INPUT_KEYRANGE_CONFIG, thriftToString(range));
}
// Attach the secondary-index conditions as a server-side row filter, if any were given.
// Bug fix: setRow_filterIsSet(true) previously ran unconditionally (dangling statement
// after an unbraced if), marking the row filter as "set" even when it was null.
if (conditions != null) {
    keyRange.setRow_filter(conditions);
    keyRange.setRow_filterIsSet(true);
}
List<KeySlice> keys = selector.getKeySlices(new ColumnParent(m.getTableName()), keyRange, slicePredicate, getConsistencyLevel());
/**
 * Limits the job's input rows with a server-side row filter over the full ring.
 *
 * @param conf Job configuration you are about to run
 * @param filter index expressions applied server-side to each candidate row
 */
public static void setInputRange(Configuration conf, List<IndexExpression> filter) {
    KeyRange range = new KeyRange();
    range.setRow_filter(filter);
    conf.set(INPUT_KEYRANGE_CONFIG, thriftToString(range));
}
/**
 * Convenience overload: wraps the raw bytes in a ByteBuffer and delegates to the
 * ByteBuffer variant; a null array clears the field. Returns this for chaining.
 */
public KeyRange setStart_key(byte[] start_key) {
    ByteBuffer wrapped = (start_key == null) ? null : ByteBuffer.wrap(start_key);
    setStart_key(wrapped);
    return this;
}
/**
 * Convenience overload: wraps the raw bytes in a ByteBuffer and delegates to the
 * ByteBuffer variant; a null array clears the field. Returns this for chaining.
 */
public KeyRange setEnd_key(byte[] end_key) {
    ByteBuffer wrapped = (end_key == null) ? null : ByteBuffer.wrap(end_key);
    setEnd_key(wrapped);
    return this;
}
/**
 * Generic field accessor used by the Thrift runtime: returns the current value of
 * the requested field, boxing the primitive count.
 *
 * @throws IllegalStateException if the field id is not one of this struct's fields
 */
public Object getFieldValue(_Fields field) {
    switch (field) {
        case START_KEY:
            return getStart_key();
        case END_KEY:
            return getEnd_key();
        case START_TOKEN:
            return getStart_token();
        case END_TOKEN:
            return getEnd_token();
        case ROW_FILTER:
            return getRow_filter();
        case COUNT:
            return Integer.valueOf(getCount());
        default:
            throw new IllegalStateException();
    }
}
// NOTE(review): collapsed excerpt — a `} else {` between the two setStart_token calls
// appears to have been elided; as written the second call would clobber the first.
if (query.getRepeatLastToken()) {
// Back the start token up by one so the last token of the previous page is re-fetched.
range.setStart_token(partitioner.getTokenMinusOne(token));
range.setStart_token(token);
/**
 * Runs a get_range_slices for the given key range and slice predicate against a host
 * chosen from the pool for the range's start key, retrying on that host via the pool.
 * UnavailableException is translated into an InsufficientConsistencyException carrying
 * the configured consistency level; all other failures are unwrapped and rethrown.
 */
public List<KeySlice> getRows(String kvsMethodName, KeyRange keyRange, SlicePredicate slicePredicate) {
    InetSocketAddress chosenHost = clientPool.getRandomHostForKey(keyRange.getStart_key());
    return clientPool.runWithRetryOnHost(
            chosenHost,
            new FunctionCheckedException<CassandraClient, List<KeySlice>, RuntimeException>() {
                @Override
                public List<KeySlice> apply(CassandraClient client) {
                    try {
                        return queryRunner.run(client, tableRef,
                                () -> client.get_range_slices(kvsMethodName, tableRef, slicePredicate, keyRange, consistency));
                    } catch (UnavailableException e) {
                        // Not enough replicas alive to satisfy the requested consistency level.
                        throw new InsufficientConsistencyException("get_range_slices requires " + consistency
                                + " Cassandra nodes to be up and available.", e);
                    } catch (Exception e) {
                        throw Throwables.unwrapAndThrowAtlasDbDependencyException(e);
                    }
                }

                @Override
                public String toString() {
                    return "get_range_slices(" + tableRef + ")";
                }
            });
}
}
@Test public void testGetSuperRangeSlices() throws HectorException { for (int i = 0; i < 10; i++) { ColumnPath cp = new ColumnPath("Super1"); cp.setSuper_column(bytes("SuperColumn_1")); cp.setColumn(bytes("testGetSuperRangeSlices_" + i)); keyspace.insert("testGetSuperRangeSlices0", cp, StringSerializer.get().toByteBuffer("testGetSuperRangeSlices_Value_" + i)); keyspace.insert("testGetSuperRangeSlices1", cp, StringSerializer.get().toByteBuffer("testGetSuperRangeSlices_Value_" + i)); } // get value ColumnParent clp = new ColumnParent("Super1"); SliceRange sr = new SliceRange(ByteBuffer.wrap(new byte[0]), ByteBuffer.wrap(new byte[0]), false, 150); SlicePredicate sp = new SlicePredicate(); sp.setSlice_range(sr); KeyRange range = new KeyRange(); range.setStart_key( "".getBytes()); range.setEnd_key( "".getBytes()); Map<String, List<SuperColumn>> keySlices = se.fromBytesMap(keyspace.getSuperRangeSlices(clp, sp, range)); assertNotNull(keySlices); assertNotNull("testGetSuperRangSlices0 is null", keySlices.get("testGetSuperRangeSlices0")); assertEquals("testGetSuperRangeSlices_Value_0", string(keySlices.get("testGetSuperRangeSlices0").get(0).getColumns().get(0).getValue())); assertEquals(1, keySlices.get("testGetSuperRangeSlices1").size()); assertEquals(10, keySlices.get("testGetSuperRangeSlices1").get(0).getColumns().size()); ColumnPath cp = new ColumnPath("Super1"); keyspace.remove("testGetSuperRangeSlices0", cp); keyspace.remove("testGetSuperRangeSlices1", cp); }