// Build a dense composite range covering [sliceStart, sliceEnd] for this query.
Composite startComposite = CellNames.simpleDense(query.getSliceStart().asByteBuffer());
Composite endComposite = CellNames.simpleDense(query.getSliceEnd().asByteBuffer());
// NOTE(review): when the query carries an explicit limit, one extra cell is requested —
// presumably so the caller can detect whether more data exists past the page; confirm with caller.
SliceQueryFilter sqf = new SliceQueryFilter(startComposite, endComposite, false, query.getLimit() + (query.hasLimit()?1:0));
// Wrap the slice filter in a single-partition read command against the queried key.
ReadCommand sliceCmd = new SliceFromReadCommand(keyspace, query.getKey().asByteBuffer(), columnFamily, nowMillis, sqf);
/**
 * Returns a copy of this filter that is identical except for its cell count limit.
 *
 * @param newCount the cell limit for the copy
 * @return a new filter sharing this filter's slices, direction and grouping, with {@code newCount} as its limit
 */
public SliceQueryFilter withUpdatedCount(int newCount)
{
    return new SliceQueryFilter(slices, reversed, newCount, compositesToGroup);
}
/**
 * Returns a copy of this filter that is identical except for the column slices it covers.
 *
 * @param newSlices the slices the copy should query
 * @return a new filter sharing this filter's direction, count and grouping, over {@code newSlices}
 */
public SliceQueryFilter withUpdatedSlices(ColumnSlice[] newSlices)
{
    return new SliceQueryFilter(newSlices, reversed, count, compositesToGroup);
}
/**
 * Makes a shallow copy of this filter: the slices array is shared, not duplicated.
 *
 * @return a new filter with the same slices, direction, count and grouping as this one
 */
public SliceQueryFilter cloneShallow()
{
    return new SliceQueryFilter(slices, reversed, count, compositesToGroup);
}
/**
 * Builds a QueryFilter satisfying the given slice criteria over one row.
 *
 * @param key       the row to slice
 * @param cfName    the column family to query
 * @param start     cell to start the slice at, inclusive; empty means "the first cell"
 * @param finish    cell to stop the slice at, inclusive; empty means "the last cell"
 * @param reversed  true to start with the largest cell (per the configured sort order) instead of the smallest
 * @param limit     maximum number of non-deleted cells to return
 * @param timestamp time used for determining the state of expiring cells
 * @return a QueryFilter wrapping a slice filter over the given range
 */
public static QueryFilter getSliceFilter(DecoratedKey key, String cfName, Composite start, Composite finish, boolean reversed, int limit, long timestamp)
{
    SliceQueryFilter slice = new SliceQueryFilter(start, finish, reversed, limit);
    return new QueryFilter(key, cfName, slice, timestamp);
}
/**
 * Builds a SliceQueryFilter over the given slices, honoring this statement's ordering.
 *
 * @param slices  the column slices to fetch; asserted valid for the table's comparator
 * @param limit   maximum number of cells to return
 * @param toGroup number of composite components to group cells by
 * @return the slice filter
 */
private SliceQueryFilter sliceFilter(ColumnSlice[] slices, int limit, int toGroup)
{
    // Use %s placeholders instead of concatenating values into the format string:
    // a slice whose printed form contains '%' would otherwise make String.format throw
    // an IllegalFormatException when the assertion fires.
    assert ColumnSlice.validateSlices(slices, cfm.comparator, isReversed)
        : String.format("Invalid slices: %s%s", Arrays.toString(slices), isReversed ? " (reversed)" : "");
    return new SliceQueryFilter(slices, isReversed, limit, toGroup);
}
/**
 * Reads a SliceQueryFilter from the given stream: a slice count, that many serialized
 * slices, the reversed flag, the cell count, and the composites-to-group value.
 *
 * @param in      stream to read from
 * @param version serialization version for the slice serializer
 * @return the deserialized filter
 * @throws IOException on stream failure
 */
public SliceQueryFilter deserialize(DataInput in, int version) throws IOException
{
    int sliceCount = in.readInt();
    ColumnSlice[] deserializedSlices = new ColumnSlice[sliceCount];
    for (int idx = 0; idx < sliceCount; idx++)
        deserializedSlices[idx] = type.sliceSerializer().deserialize(in, version);

    boolean isReversed = in.readBoolean();
    int cellCount = in.readInt();
    int grouping = in.readInt();
    return new SliceQueryFilter(deserializedSlices, isReversed, cellCount, grouping);
}
/**
 * Builds the filter used to populate the row cache: all columns, forward order,
 * limited to the configured rows-to-cache count, grouped by clustering columns.
 */
public SliceQueryFilter readFilterForCache()
{
    // We create a new filter every time because, for now, SliceQueryFilter is unfortunately mutable.
    return new SliceQueryFilter(ColumnSlice.ALL_COLUMNS_ARRAY, false, metadata.getCaching().rowCache.rowsToCache, metadata.clusteringColumns().size());
}
/**
 * Splits this filter into two SliceQueryFilters: one that slices only the static columns,
 * and one that slices the remainder of the normal data.
 *
 * This should only be called when the filter is reversed and the filter is known to cover
 * static columns (through hasStaticSlice()).
 *
 * @param cfm the table metadata supplying the comparator and static prefix
 * @return a pair of (static, normal) SliceQueryFilters
 */
public Pair<SliceQueryFilter, SliceQueryFilter> splitOutStaticSlice(CFMetaData cfm)
{
    assert reversed;

    Composite staticSliceEnd = cfm.comparator.staticPrefix().end();
    List<ColumnSlice> nonStaticSlices = new ArrayList<>(slices.length);
    for (ColumnSlice slice : slices)
    {
        // Any slice that reaches into the static columns is truncated at the static prefix
        // boundary so the "normal" filter no longer covers statics; other slices pass through.
        if (sliceIncludesStatics(slice, cfm))
            nonStaticSlices.add(new ColumnSlice(slice.start, staticSliceEnd));
        else
            nonStaticSlices.add(slice);
    }

    // First filter covers only the static prefix (note: still reversed); second covers the
    // truncated normal slices. Both inherit this filter's count and grouping.
    return Pair.create(
        new SliceQueryFilter(staticSliceEnd, Composites.EMPTY, true, count, compositesToGroup),
        new SliceQueryFilter(nonStaticSlices.toArray(new ColumnSlice[nonStaticSlices.size()]), true, count, compositesToGroup));
}
public IDiskAtomFilter readFilter() { assert !conditions.isEmpty(); ColumnSlice[] slices = new ColumnSlice[conditions.size()]; int i = 0; // We always read CQL rows entirely as on CAS failure we want to be able to distinguish between "row exists // but all values for which there were conditions are null" and "row doesn't exists", and we can't rely on the // row marker for that (see #6623) for (Composite prefix : conditions.keySet()) slices[i++] = prefix.slice(); int toGroup = cfm.comparator.isDense() ? -1 : cfm.clusteringColumns().size(); slices = ColumnSlice.deoverlapSlices(slices, cfm.comparator); assert ColumnSlice.validateSlices(slices, cfm.comparator, false); return new SliceQueryFilter(slices, false, slices.length, toGroup); }
/**
 * Reads the partition at the given key over the given slices, with no cell limit,
 * and groups the resulting cells into per-row column families.
 *
 * @param dk           the partition key to read
 * @param columnSlices the cell ranges to fetch
 * @return the fetched cells keyed by clustering cell name, as produced by the table mapper
 */
private Map<CellName, ColumnFamily> getCellNameColumnFamilyMap(DecoratedKey dk, ColumnSlice[] columnSlices)
{
    SliceQueryFilter slice = new SliceQueryFilter(columnSlices, false, Integer.MAX_VALUE);
    QueryFilter query = new QueryFilter(dk, tableMapper.table.name, slice, filter.timestamp);
    ColumnFamily fetched = tableMapper.table.getColumnFamily(query);
    return tableMapper.getRows(fetched);
}
public static IDiskAtomFilter fromSCNamesFilter(CellNameType type, ByteBuffer scName, NamesQueryFilter filter) { if (scName == null) { ColumnSlice[] slices = new ColumnSlice[filter.columns.size()]; int i = 0; for (CellName name : filter.columns) { // Note that, because the filter in argument is the one from thrift, 'name' are SimpleDenseCellName. // So calling name.slice() would be incorrect, as simple cell names don't handle the EOC properly. // This is why we call toByteBuffer() and rebuild a Composite of the right type before call slice(). slices[i++] = type.make(name.toByteBuffer()).slice(); } return new SliceQueryFilter(slices, false, slices.length, 1); } else { SortedSet<CellName> newColumns = new TreeSet<>(type); for (CellName c : filter.columns) newColumns.add(type.makeCellName(scName, c.toByteBuffer())); return filter.withUpdatedColumns(newColumns); } }
private List<Row> getHintsSlice(int columnCount) { // Get count # of columns... SliceQueryFilter predicate = new SliceQueryFilter(ColumnSlice.ALL_COLUMNS_ARRAY, false, columnCount); // From keys "" to ""... IPartitioner partitioner = StorageService.getPartitioner(); RowPosition minPos = partitioner.getMinimumToken().minKeyBound(); Range<RowPosition> range = new Range<>(minPos, minPos); try { RangeSliceCommand cmd = new RangeSliceCommand(Keyspace.SYSTEM_KS, SystemKeyspace.HINTS_CF, System.currentTimeMillis(), predicate, range, null, LARGE_NUMBER); return StorageProxy.getRangeSlice(cmd, ConsistencyLevel.ONE); } catch (Exception e) { logger.info("HintsCF getEPPendingHints timed out."); throw new RuntimeException(e); } }
/**
 * Builds the read filter for checking this condition's expected columns.
 * With nothing expected, a single cell is fetched; otherwise exactly the
 * expected column names are read.
 *
 * @return the filter to read with
 */
public IDiskAtomFilter readFilter()
{
    if (expected == null || expected.isEmpty())
        return new SliceQueryFilter(ColumnSlice.ALL_COLUMNS_ARRAY, false, 1);

    return new NamesQueryFilter(ImmutableSortedSet.copyOf(expected.getComparator(), expected.getColumnNames()));
}
/**
 * Converts a thrift SliceRange into an internal SliceQueryFilter. For super column
 * families the range is interpreted against the relevant subcomparator and then
 * translated onto the internal compound layout.
 *
 * @param metadata the table metadata
 * @param parent   the thrift column parent (may carry a super column)
 * @param range    the thrift slice range
 * @return the internal slice filter
 */
private SliceQueryFilter toInternalFilter(CFMetaData metadata, ColumnParent parent, SliceRange range)
{
    if (!metadata.isSuper())
    {
        // Regular table: the range maps directly onto the table comparator.
        Composite from = metadata.comparator.fromByteBuffer(range.start);
        Composite to = metadata.comparator.fromByteBuffer(range.finish);
        return new SliceQueryFilter(from, to, range.reversed, range.count);
    }

    // Super column family: pick the subcomparator matching whether a super column is set,
    // build the filter there, then translate it into the internal compound layout.
    CellNameType subType = new SimpleDenseCellNameType(metadata.comparator.subtype(parent.isSetSuper_column() ? 1 : 0));
    Composite from = subType.fromByteBuffer(range.start);
    Composite to = subType.fromByteBuffer(range.finish);
    SliceQueryFilter subFilter = new SliceQueryFilter(from, to, range.reversed, range.count);
    return SuperColumns.fromSCSliceFilter(metadata.comparator, parent.bufferForSuper_column(), subFilter);
}
/**
 * Loads the existing row for the given primary key and adds indexable fields for each
 * of its cells that the index options cover.
 *
 * @param dk     the partition key of the row
 * @param pkBuf  serialized primary key used to build the clustering key
 * @param fields output list that indexable fields are appended to
 */
private void loadOldRow(DecoratedKey dk, ByteBuffer pkBuf, List<Field> fields)
{
    CellName clusteringKey = tableMapper.makeClusteringKey(pkBuf);
    Composite start = tableMapper.start(clusteringKey);
    Composite end = tableMapper.end(start);
    ColumnSlice columnSlice = new ColumnSlice(start, end);
    SliceQueryFilter sliceQueryFilter = new SliceQueryFilter(columnSlice, false, Integer.MAX_VALUE);
    // System.currentTimeMillis() replaces the legacy new Date().getTime() — same value,
    // no throwaway Date allocation.
    QueryFilter queryFilter = new QueryFilter(dk, tableMapper.table.name, sliceQueryFilter, System.currentTimeMillis());
    ColumnFamily columnFamily = tableMapper.table.getColumnFamily(queryFilter);
    Map<CellName, ColumnFamily> fullSlice = tableMapper.getRows(columnFamily);
    ColumnFamily oldDocument = fullSlice.get(clusteringKey);
    // The row may be absent from the slice (e.g. already removed); iterating a null
    // ColumnFamily would throw NPE, so treat "no old document" as nothing to index.
    if (oldDocument == null)
        return;
    for (Cell cell : oldDocument)
    {
        CellName cellName = cell.name();
        ColumnIdentifier cql3ColName = cellName.cql3ColumnName(tableMapper.cfMetaData);
        String actualColName = cql3ColName.toString();
        ColumnDefinition columnDefinition = tableMapper.cfMetaData.getColumnDefinition(cql3ColName);
        if (options.shouldIndex(actualColName))
        {
            addFields(cell, actualColName, columnDefinition, fields);
        }
    }
}
/**
 * Converts a thrift SlicePredicate into an internal IDiskAtomFilter: a slice filter
 * when a slice range is given, otherwise a names filter over the listed columns.
 * For super column families the result is further translated to the internal layout.
 *
 * @param sp          the thrift predicate (slice range or explicit column names)
 * @param metadata    the table metadata
 * @param superColumn the super column being queried, or null
 * @return the equivalent internal filter
 */
public static IDiskAtomFilter asIFilter(SlicePredicate sp, CFMetaData metadata, ByteBuffer superColumn)
{
    SliceRange sr = sp.slice_range;
    CellNameType comparator = metadata.isSuper()
                            ? new SimpleDenseCellNameType(metadata.comparator.subtype(superColumn == null ? 0 : 1))
                            : metadata.comparator;

    IDiskAtomFilter filter;
    if (sr != null)
    {
        filter = new SliceQueryFilter(comparator.fromByteBuffer(sr.start), comparator.fromByteBuffer(sr.finish), sr.reversed, sr.count);
    }
    else
    {
        SortedSet<CellName> names = new TreeSet<CellName>(comparator);
        for (ByteBuffer bb : sp.column_names)
            names.add(comparator.cellFromByteBuffer(bb));
        filter = new NamesQueryFilter(names);
    }

    return metadata.isSuper() ? SuperColumns.fromSCFilter(metadata.comparator, superColumn, filter) : filter;
}
}
/**
 * Derives the read filter for a legacy SELECT: a names filter when explicit column
 * names are given, otherwise a slice filter over the statement's column range.
 *
 * @param select    the parsed select statement
 * @param metadata  the table metadata
 * @param variables bound values for the statement's markers
 * @return the filter to read with
 * @throws InvalidRequestException if a bound value is invalid
 */
private static IDiskAtomFilter filterFromSelect(SelectStatement select, CFMetaData metadata, List<ByteBuffer> variables) throws InvalidRequestException
{
    // Explicit, non-empty column name list => names filter.
    if (!select.isColumnRange() && select.getColumnNames().size() != 0)
        return new NamesQueryFilter(getColumnNames(select, metadata, variables));

    AbstractType<?> comparator = metadata.comparator.asAbstractType();
    return new SliceQueryFilter(metadata.comparator.fromByteBuffer(select.getColumnStart().getByteBuffer(comparator, variables)),
                                metadata.comparator.fromByteBuffer(select.getColumnFinish().getByteBuffer(comparator, variables)),
                                select.isColumnsReversed(),
                                select.getColumnsLimit());
}
/**
 * Base class for closeable row iterators produced by range scans.
 * By default, rows it yields still require filtering; subclasses presumably
 * override {@link #needsFiltering()} when their output is already filtered —
 * confirm against subclasses.
 */
public static abstract class AbstractScanIterator extends AbstractIterator<Row> implements CloseableIterator<Row>
{
    /**
     * @return whether the rows produced by this iterator still need filtering
     */
    public boolean needsFiltering()
    {
        return true;
    }
}
/**
 * Translates a single-slice filter expressed against a super column family's thrift view
 * into a filter on the internal compound layout.
 *
 * @param type   the internal cell name type
 * @param scName the super column being sliced into, or null when slicing super column names
 * @param filter the thrift-level slice filter; must contain exactly one slice
 * @return the equivalent internal slice filter
 */
public static SliceQueryFilter fromSCSliceFilter(CellNameType type, ByteBuffer scName, SliceQueryFilter filter)
{
    assert filter.slices.length == 1;
    if (scName == null)
    {
        // The filter is on the super column name.
        CBuilder builder = type.builder();
        // Empty bounds stay empty; otherwise the EOC markers are swapped when the filter
        // is reversed, so the bound composites match the reversed iteration order.
        Composite start = filter.start().isEmpty()
                        ? Composites.EMPTY
                        : builder.buildWith(filter.start().toByteBuffer()).withEOC(filter.reversed ? Composite.EOC.END : Composite.EOC.START);
        Composite finish = filter.finish().isEmpty()
                         ? Composites.EMPTY
                         : builder.buildWith(filter.finish().toByteBuffer()).withEOC(filter.reversed ? Composite.EOC.START : Composite.EOC.END);
        // Group by the first composite component (the super column name).
        return new SliceQueryFilter(start, finish, filter.reversed, filter.count, 1);
    }
    else
    {
        // Slicing sub-columns inside one super column: every bound is prefixed with scName.
        CBuilder builder = type.builder().add(scName);
        // An empty thrift bound means the edge of the super column, built with the EOC
        // appropriate to the filter's direction; a non-empty bound is appended as-is.
        Composite start = filter.start().isEmpty()
                        ? builder.build().withEOC(filter.reversed ? Composite.EOC.END : Composite.EOC.START)
                        : builder.buildWith(filter.start().toByteBuffer());
        Composite end = filter.finish().isEmpty()
                      ? builder.build().withEOC(filter.reversed ? Composite.EOC.START : Composite.EOC.END)
                      : builder.buildWith(filter.finish().toByteBuffer());
        return new SliceQueryFilter(start, end, filter.reversed, filter.count);
    }
}
}