/**
 * Loads every field-cache column stored for {@code field} of the index/shard
 * currently bound to this thread (taken from {@code SolandraCoreContainer.coreInfo}).
 *
 * @param indexReader unused here; kept for signature compatibility with callers
 * @param field       the Lucene field whose cache row is read
 * @return the sorted columns of the field-cache row, or an empty list when the
 *         row is missing or has no column family
 * @throws IOException if the underlying read fails
 */
private static Collection<IColumn> getFieldCacheEntries(IndexReader indexReader, String field) throws IOException
{
    String indexName = SolandraCoreContainer.coreInfo.get().indexName + "~"
            + SolandraCoreContainer.coreInfo.get().shard;
    byte[] indexNameBytes = indexName.getBytes("UTF-8");

    if (logger.isDebugEnabled())
        logger.debug("Loading field cache from " + indexName + " " + field);

    ColumnParent fieldCacheParent = new ColumnParent(CassandraUtils.fieldCacheColumnFamily);

    // FIX: the original called field.getBytes() with the platform-default
    // charset; every other key in this file is built from explicit UTF-8
    // bytes, so on a non-UTF-8 JVM the computed key would address a
    // different row than the one the writer populated.
    ByteBuffer fieldCacheKey = CassandraUtils.hashKeyBytes(indexNameBytes, CassandraUtils.delimeterBytes,
            field.getBytes("UTF-8"));

    // Read the whole row: open-ended slice (empty start/finish), no count limit.
    List<Row> rows = CassandraUtils.robustRead(CassandraUtils.consistency, new SliceFromReadCommand(
            CassandraUtils.keySpace, fieldCacheKey, fieldCacheParent, ByteBufferUtil.EMPTY_BYTE_BUFFER,
            ByteBufferUtil.EMPTY_BYTE_BUFFER, false, Integer.MAX_VALUE));

    if (rows.isEmpty())
        return Collections.emptyList();

    Row row = rows.get(0);
    if (row.cf == null)
        return Collections.emptyList();

    return row.cf.getSortedColumns();
}
// NOTE(review): fragments of larger methods — the enclosing declarations are
// outside this view; do not treat these as one contiguous block.
// Reads up to bufferSize term columns starting at startTerm (presumably for
// paged term enumeration — confirm against the enclosing loop).
List<Row> rows = CassandraUtils.robustRead(CassandraUtils.consistency, new SliceFromReadCommand( CassandraUtils.keySpace, termsListKey, fieldColumnFamily, CassandraUtils.createColumnName(startTerm), ByteBufferUtil.EMPTY_BYTE_BUFFER, false, bufferSize));
// Logs the row about to be scanned, then queues an unbounded full-row slice
// (empty start/finish bounds, Integer.MAX_VALUE count) for a batched read.
logger.debug("scanning row: " + ByteBufferUtil.string(rowKey));
reads.add((ReadCommand) new SliceFromReadCommand(CassandraUtils.keySpace, rowKey, columnParent, ByteBufferUtil.EMPTY_BYTE_BUFFER, ByteBufferUtil.EMPTY_BYTE_BUFFER, false, Integer.MAX_VALUE));
// Pages through the schema-info row starting at startCol, pageSize columns at
// a time (fragment; the enclosing method and its paging loop are not visible).
ReadCommand cmd = new SliceFromReadCommand(CassandraUtils.keySpace, key, new ColumnParent(CassandraUtils.schemaInfoColumnFamily), startCol, ByteBufferUtil.EMPTY_BYTE_BUFFER, false, pageSize);
public void deleteDocuments(String indexName, Term term, boolean autoCommit) throws CorruptIndexException, IOException { ColumnParent cp = new ColumnParent(CassandraUtils.termVecColumnFamily); ByteBuffer key = CassandraUtils.hashKeyBytes(indexName.getBytes("UTF-8"), CassandraUtils.delimeterBytes, term .field().getBytes("UTF-8"), CassandraUtils.delimeterBytes, term.text().getBytes("UTF-8")); ReadCommand rc = new SliceFromReadCommand(CassandraUtils.keySpace, key, cp, ByteBufferUtil.EMPTY_BYTE_BUFFER, ByteBufferUtil.EMPTY_BYTE_BUFFER, false, Integer.MAX_VALUE); List<Row> rows = CassandraUtils.robustRead(CassandraUtils.consistency, rc); // delete by documentId for (Row row : rows) { if (row.cf != null) { Collection<IColumn> columns = row.cf.getSortedColumns(); for (IColumn col : columns) { deleteLucandraDocument(indexName, CassandraUtils.readVInt(col.name()), autoCommit); } } } }
// NOTE(review): fragment — this opens mid-statement (the tail of a key-building
// call whose last component is the "shards" bytes), then issues an unbounded
// slice read over the schema-info row. Enclosing method not visible here.
"shards".getBytes("UTF-8")); ReadCommand cmd = new SliceFromReadCommand(CassandraUtils.keySpace, key, new ColumnParent( CassandraUtils.schemaInfoColumnFamily), ByteBufferUtil.EMPTY_BYTE_BUFFER, ByteBufferUtil.EMPTY_BYTE_BUFFER, false, Integer.MAX_VALUE);
// Queues a slice from the start of the row up to finalTokenBytes, with no
// count limit (fragment; the enclosing loop/method is not visible here).
readCommands.add(new SliceFromReadCommand(CassandraUtils.keySpace, key, columnParent, ByteBufferUtil.EMPTY_BYTE_BUFFER, CassandraUtils.finalTokenBytes, false, Integer.MAX_VALUE));
// Builds a slice filter sized limit+1 when a limit is set (the extra cell
// presumably lets the caller detect "more results available" — confirm), then
// a timestamped slice read over the query's key. Fragment; method not visible.
SliceQueryFilter sqf = new SliceQueryFilter(startComposite, endComposite, false, query.getLimit() + (query.hasLimit()?1:0)); ReadCommand sliceCmd = new SliceFromReadCommand(keyspace, query.getKey().asByteBuffer(), columnFamily, nowMillis, sqf);
/**
 * Returns a copy of this command carrying {@code newFilter} in place of the
 * current slice filter; keyspace, key, column family and timestamp are reused.
 */
public SliceFromReadCommand withUpdatedFilter(SliceQueryFilter newFilter)
{
    SliceFromReadCommand refiltered = new SliceFromReadCommand(ksName, key, cfName, timestamp, newFilter);
    return refiltered;
}
/**
 * Static factory: chooses the concrete ReadCommand subtype matching the
 * filter's kind. A SliceQueryFilter yields a contiguous-slice command; any
 * other filter is cast to NamesQueryFilter (so, exactly as in a plain
 * if/else, an unexpected filter type fails with ClassCastException).
 */
public static ReadCommand create(String ksName, ByteBuffer key, String cfName, long timestamp, IDiskAtomFilter filter)
{
    return filter instanceof SliceQueryFilter
           ? new SliceFromReadCommand(ksName, key, cfName, timestamp, (SliceQueryFilter) filter)
           : new SliceByNamesReadCommand(ksName, key, cfName, timestamp, (NamesQueryFilter) filter);
}
/**
 * Creates a fresh command over the same keyspace/key/cf/timestamp/filter,
 * carrying this command's digest flag over to the copy.
 */
public ReadCommand copy()
{
    SliceFromReadCommand duplicate = new SliceFromReadCommand(ksName, key, cfName, timestamp, filter);
    return duplicate.setIsDigestQuery(isDigestQuery());
}
/**
 * Deserializes a SliceFromReadCommand from the wire format, reading in order:
 * digest flag, keyspace, key (short-length-prefixed), column family name,
 * timestamp, then the slice filter. The read order must match the serializer.
 *
 * @throws UnknownColumnFamilyException when the table is not in the local
 *         schema — typically a schema-propagation race on freshly created tables
 */
public ReadCommand deserialize(DataInput in, int version) throws IOException
{
    boolean isDigest = in.readBoolean();
    String keyspaceName = in.readUTF();
    ByteBuffer key = ByteBufferUtil.readWithShortLength(in);
    String cfName = in.readUTF();
    long timestamp = in.readLong();
    // The filter serializer is comparator-specific, so the table's metadata
    // must exist locally before the filter bytes can be decoded.
    CFMetaData metadata = Schema.instance.getCFMetaData(keyspaceName, cfName);
    if (metadata == null)
    {
        String message = String.format("Got slice command for nonexistent table %s.%s. If the table was just " + "created, this is likely due to the schema not being fully propagated. Please wait for schema " + "agreement on table creation.", keyspaceName, cfName);
        throw new UnknownColumnFamilyException(message, null);
    }
    SliceQueryFilter filter = metadata.comparator.sliceQueryFilterSerializer().deserialize(in, version);
    return new SliceFromReadCommand(keyspaceName, key, cfName, timestamp, filter).setIsDigestQuery(isDigest);
}
// Pages over one partition with an identity (match-everything) filter stamped
// with the current time; the pager gets null consistency/state and a true
// flag (presumably "local query" — confirm against SliceQueryPager's ctor).
// Fragment; the enclosing (likely test) method is not visible here.
SliceFromReadCommand command = new SliceFromReadCommand(cfs.metadata.ksName, key, cfs.name, System.currentTimeMillis(), new IdentityQueryFilter()); final SliceQueryPager pager = new SliceQueryPager(command, null, null, true);
/**
 * Counts the live cells/rows in one slice of a single partition, fetching the
 * data in pages of {@code pageSize} instead of one large read.
 *
 * @param keyspace         keyspace containing the table
 * @param columnFamily     table to read from
 * @param key              partition key of the row being counted
 * @param filter           slice to count; also supplies the ColumnCounter
 * @param consistencyLevel consistency level for each page read
 * @param cState           client state passed to the pager
 * @param pageSize         number of cells fetched per page
 * @param now              query timestamp used for liveness decisions
 * @return the number of live cells/rows in the slice
 */
public static int countPaged(String keyspace, String columnFamily, ByteBuffer key, SliceQueryFilter filter, ConsistencyLevel consistencyLevel, ClientState cState, final int pageSize, long now) throws RequestValidationException, RequestExecutionException
{
    SliceFromReadCommand pagedRead = new SliceFromReadCommand(keyspace, key, columnFamily, now, filter);
    final SliceQueryPager pager = new SliceQueryPager(pagedRead, consistencyLevel, cState, false);

    ColumnCounter liveCounter = filter.columnCounter(Schema.instance.getCFMetaData(keyspace, columnFamily).comparator, now);

    while (!pager.isExhausted())
    {
        List<Row> page = pager.fetchPage(pageSize);
        if (page.isEmpty())
            continue;
        // Single-partition read: only the page's first row is counted,
        // matching the original behavior.
        liveCounter.countAll(page.get(0).cf);
    }
    return liveCounter.live();
}
}
// Builds a bounded, possibly reversed slice read (sname..ename, count cells)
// and adds it to the batch; digest reads are explicitly disabled.
// Fragment; the enclosing method is not visible here.
ReadCommand readCommand = new SliceFromReadCommand(ks, rKey, new ColumnParent(cf), sname, ename, reversed, count);
readCommand.setDigestQuery(false);
commands.add(readCommand);
// NOTE(review): fragment truncated mid-argument-list — one slice command is
// queued per partition key, all sharing a single timestamp captured up front.
long now = System.currentTimeMillis(); for (ByteBuffer key : partitionKeys) commands.add(new SliceFromReadCommand(keyspace(), key, columnFamily(),
// Duplicate of the earlier limit+1 fragment: the filter requests one cell
// beyond the query limit when a limit is set (presumably to detect further
// results — confirm), then a timestamped slice read is built over the key.
// Fragment; enclosing method not visible here.
SliceQueryFilter sqf = new SliceQueryFilter(startComposite, endComposite, false, query.getLimit() + (query.hasLimit()?1:0)); ReadCommand sliceCmd = new SliceFromReadCommand(keyspace, query.getKey().asByteBuffer(), columnFamily, nowMillis, sqf);
// Timestamped slice read over the query's partition key; sqf is built in
// unseen preceding code. Fragment; enclosing method not visible here.
ReadCommand sliceCmd = new SliceFromReadCommand(keyspace, query.getKey().asByteBuffer(), columnFamily, nowMillis, sqf);
// NOTE(review): fragment truncated mid-argument-list — queues a slice read
// for the select statement's column family; remaining arguments not visible.
commands.add(new SliceFromReadCommand(metadata.ksName, key, select.getColumnFamily(),