/**
 * Builds a Thrift {@code Column} from raw name/value bytes.
 *
 * @param value     column value bytes
 * @param name      column name bytes
 * @param timestamp write timestamp stamped onto the column
 * @param ttl       time-to-live in seconds; a value of {@code 0} means
 *                  "no expiry" and the TTL field is left unset
 * @return the populated column
 */
private Column prepareColumn(byte[] value, byte[] name, long timestamp, int ttl) {
    Column result = new Column();
    result.setName(name);
    result.setValue(value);
    result.setTimestamp(timestamp);
    // Only set TTL when non-zero: setting it marks the optional field present.
    if (ttl != 0) {
        result.setTtl(ttl);
    }
    return result;
}
// Fragment (enclosing method signature cut off): iterates the subcolumns of a
// super column, decodes each column name as a String, and injects the raw value
// bytes into the matching field of `embeddedObject` via the field map.
// NOTE(review): `superColumn` and `embeddedObject` are defined outside this
// fragment — confirm against the full method.
Map<String, Field> superColumnFieldMap) for (Column column : superColumn.getColumns()) String thriftColumnName = PropertyAccessorFactory.STRING.fromBytes(String.class, column.getName()); byte[] thriftColumnValue = column.getValue(); PropertyAccessorHelper.set(embeddedObject, superColumnFieldMap.get(thriftColumnName), thriftColumnValue);
/**
 * Reads one metadata value from the regular column wrapped by {@code element}.
 *
 * @param element column-or-supercolumn whose plain column is inspected
 * @param meta    which metadata to extract (timestamp or TTL)
 * @return the column's timestamp ({@code long}) or TTL ({@code int}), boxed
 * @throws UnsupportedOperationException for any other {@code meta} kind
 */
@Override
public Object getMetaData(ColumnOrSuperColumn element, EntryMetaData meta) {
    if (meta == EntryMetaData.TIMESTAMP) {
        return element.getColumn().getTimestamp();
    }
    if (meta == EntryMetaData.TTL) {
        return element.getColumn().getTtl();
    }
    throw new UnsupportedOperationException("Unsupported meta data: " + meta);
}
}
// Fragment (enclosing method and closing braces cut off): wraps an entry's
// column/value/timestamp/TTL into a Thrift Mutation, then dispatches the whole
// batch either atomically or non-atomically depending on `atomicBatch`.
// NOTE(review): `ent`, `commitTime`, `times`, `ttl`, `conn`, `batch`,
// `consistency` and `atomicBatch` come from outside this fragment — verify
// against the full method before editing.
ColumnOrSuperColumn cosc = new ColumnOrSuperColumn(); Column column = new Column(ent.getColumnAs(StaticBuffer.BB_FACTORY)); column.setValue(ent.getValueAs(StaticBuffer.BB_FACTORY)); column.setTimestamp(commitTime.getAdditionTime(times)); column.setTtl(ttl); cosc.setColumn(column); org.apache.cassandra.thrift.Mutation m = new org.apache.cassandra.thrift.Mutation(); m.setColumn_or_supercolumn(cosc); Cassandra.Client client = conn.getClient(); if (atomicBatch) { client.atomic_batch_mutate(batch, consistency); } else { client.batch_mutate(batch, consistency);
// Fragment (loop braces lost in extraction): for each join value, builds a
// column named "<invJoinColumnName><separator><value>" whose bytes are the
// serialized value, collects it into a mutation list, then batch-mutates
// against the entity's keyspace.
// NOTE(review): "mulationMap" looks like a typo for "mutationMap" in the
// original source — it is a variable name, so it is left untouched here.
for (Object value : values) Column column = new Column(); column.setName(PropertyAccessorFactory.STRING.toBytes(invJoinColumnName + Constants.JOIN_COLUMN_NAME_SEPARATOR + value)); column.setValue(PropertyAccessorHelper.getBytes(value)); column.setTimestamp(generator.getTimestamp()); columnType = value.getClass(); columns.add(column); mut.setColumn_or_supercolumn(new ColumnOrSuperColumn().setColumn(column)); insertionList.add(mut); conn.getClient().set_keyspace(entityMetadata.getSchema()); conn.getClient().batch_mutate(mulationMap, getConsistencyLevel());
@Override public void insert(String key, ColumnPath columnPath, ByteBuffer value, long timestamp) throws HectorException { // valideColumnPath(columnPath); ColumnParent columnParent = new ColumnParent(columnPath.getColumn_family()); if (columnPath.isSetSuper_column()) { columnParent.setSuper_column(columnPath.getSuper_column()); } Column column = new Column(ByteBuffer.wrap(columnPath.getColumn())); column.setValue(value); column.setTimestamp(timestamp); insert(StringSerializer.get().toByteBuffer(key), columnParent, column); }
/** * Test insertion of a supercolumn using insert */ @Test public void testInsertSuper() throws IllegalArgumentException, NoSuchElementException, IllegalStateException, HNotFoundException, Exception { // insert value ColumnParent columnParent = new ColumnParent("Super1"); columnParent.setSuper_column(StringSerializer.get().toByteBuffer("testInsertSuper_super")); Column column = new Column(StringSerializer.get().toByteBuffer("testInsertSuper_column")); column.setValue(StringSerializer.get().toByteBuffer("testInsertSuper_value")); column.setTimestamp(connectionManager.createClock()); keyspace.insert(StringSerializer.get().toByteBuffer("testInsertSuper_key"), columnParent, column); column.setName(StringSerializer.get().toByteBuffer("testInsertSuper_column2")); keyspace.insert(StringSerializer.get().toByteBuffer("testInsertSuper_key"), columnParent, column); // get value and assert ColumnPath cp2 = new ColumnPath("Super1"); cp2.setSuper_column(bytes("testInsertSuper_super")); SuperColumn sc = keyspace.getSuperColumn("testInsertSuper_key", cp2); assertNotNull(sc); assertEquals("testInsertSuper_super", string(sc.getName())); assertEquals(2, sc.getColumns().size()); assertEquals("testInsertSuper_value", string(sc.getColumns().get(0).getValue())); // remove value keyspace.remove("testInsertSuper_super", cp2); }
public HColumnImpl(N name, V value, long clock, Serializer<N> nameSerializer, Serializer<V> valueSerializer) { this(nameSerializer, valueSerializer); notNull(name, "name is null"); notNull(value, "value is null"); this.column = new Column(nameSerializer.toByteBuffer(name)); this.column.setValue(valueSerializer.toByteBuffer(value)); this.column.setTimestamp(clock); }
// Fragment: builds a super column named by the index column, holding one
// subcolumn whose name is the indexed row key and whose value is the embedded
// column value's bytes, stamped with a fresh timestamp.
// NOTE(review): `indexColumnName`, `rowKey`, `ecValue` and `generator` are
// declared outside this fragment; `ecValue.getBytes()` uses the platform
// charset if ecValue is a String — confirm in the full source.
SuperColumn thriftSuperColumn = new SuperColumn(); thriftSuperColumn.setName(indexColumnName); Column thriftColumn = new Column(); thriftColumn.setName(rowKey); thriftColumn.setValue(ecValue.getBytes()); thriftColumn.setTimestamp(generator.getTimestamp()); thriftSuperColumn.addToColumns(thriftColumn);
// Fragment (presumably a loop body indexed by `j`): builds the j-th test
// column, wraps it in a ColumnOrSuperColumn inside a Mutation, then asserts
// the value round-trips through the string serializer.
// NOTE(review): `j`, `connectionManager`, and `string(...)` come from the
// enclosing test — verify against the full method.
Column col = new Column(StringSerializer.get().toByteBuffer("testBatchMutateColumn_" + j)); col.setValue(StringSerializer.get().toByteBuffer("testBatchMutateColumn_value_" + j)); col.setTimestamp(connectionManager.createClock()); ColumnOrSuperColumn cosc = new ColumnOrSuperColumn(); cosc.setColumn(col); Mutation mutation = new Mutation(); mutation.setColumn_or_supercolumn(cosc); String value = string(col.getValue()); assertEquals("testBatchMutateColumn_value_" + j, value);
@Test public void testAddSuperInsertion() { Column column = new Column(StringSerializer.get().toByteBuffer("c_name")); column.setValue(StringSerializer.get().toByteBuffer("c_val")); column.setTimestamp(System.currentTimeMillis()); SuperColumn sc = new SuperColumn(StringSerializer.get().toByteBuffer("c_name"), Arrays.asList(column)); batchMutate.addSuperInsertion("key1", columnFamilies, sc); // assert there is one outter map row with 'key' as the key assertEquals(1, batchMutate.getMutationMap().get(StringSerializer.get().toByteBuffer("key1")).size()); // add again with a different column and verify there is one key and two mutations underneath // for "standard1" column = new Column(StringSerializer.get().toByteBuffer("c_name")); column.setValue(StringSerializer.get().toByteBuffer("c_val")); column.setTimestamp(System.currentTimeMillis()); SuperColumn sc2 = new SuperColumn(StringSerializer.get().toByteBuffer("c_name2"), Arrays.asList(column)); batchMutate.addSuperInsertion("key1", columnFamilies, sc2); assertEquals(2, batchMutate.getMutationMap().get(StringSerializer.get().toByteBuffer("key1")).get("Standard1").size()); }
/**
 * A ColumnOrSuperColumn wrapping a completely empty Column should be sized
 * as eight null fields.
 */
@Test
public void getSizeForColumnOrSuperColumnWithAnEmptyColumn() {
    ColumnOrSuperColumn emptyWrapper = new ColumnOrSuperColumn().setColumn(new Column());
    assertThat(ThriftObjectSizeUtils.getColumnOrSuperColumnSize(emptyWrapper))
        .isEqualTo(NULL_SIZE * 8);
}
// Fragment (CLI-style slice printer; if/else-if chain lost its braces in
// extraction): fetches a slice for `key` and, per returned cosc, prints either
// the subcolumns of a super column or a single column — name, value (decoded
// via the column-family validator), timestamp, and TTL when set.
// NOTE(review): too mangled to restructure safely here; `cosc`, `superColumn`,
// and `column` bindings come from the missing loop/branch scaffolding.
ColumnParent parent = new ColumnParent(columnFamily); if(superColumnName != null) parent.setSuper_column(superColumnName); boolean isSuperCF = cfDef.column_type.equals("Super"); List<ColumnOrSuperColumn> columns = thriftClient.get_slice(key, parent, predicate, consistencyLevel); AbstractType<?> validator; if (cosc.isSetSuper_column()) for (Column col : superColumn.getColumns()) validator = getValidatorForValue(cfDef, col.getName()); sessionState.out.printf("%n (name=%s, value=%s, timestamp=%d%s)", formatSubcolumnName(keyspace, columnFamily, col.name), validator.getString(col.value), col.timestamp, col.isSetTtl() ? String.format(", ttl=%d", col.getTtl()) : ""); else if (cosc.isSetColumn()) validator = getValidatorForValue(cfDef, column.getName()); validator.getString(column.value), column.timestamp, column.isSetTtl() ? String.format(", ttl=%d", column.getTtl()) : ""); else if (cosc.isSetCounter_super_column())
/**
 * Reports whether a CAS result row indicates success.
 *
 * <p>Scans the row for the [applied] column and compares its value against
 * the success marker bytes.
 *
 * @param cqlRow the CQL result row to inspect
 * @return true when the first [applied] column's value equals
 *         {@code SUCCESSFUL_OPERATION}
 * @throws IllegalStateException if the row contains no [applied] column
 */
private static boolean isResultSuccessful(CqlRow cqlRow) {
    for (Column candidate : cqlRow.getColumns()) {
        if (APPLIED_COLUMN.equals(decodeCqlColumnName(candidate))) {
            return Arrays.equals(SUCCESSFUL_OPERATION, candidate.getValue());
        }
    }
    throw new IllegalStateException("CQL row " + cqlRow + " was missing an [applied] column");
}
// Fragment (loop braces lost in extraction): decodes a super column's name,
// then walks its subcolumns twice — apparently first to detect null values /
// instantiate the embedded object, then to populate it.
// NOTE(review): `sc`, `scNamePrefix`, `embeddedObject` and `superColumnClass`
// span the missing scaffolding; cannot be safely restructured from this view.
String scName = PropertyAccessorFactory.STRING.fromBytes(String.class, sc.getName()); String scNamePrefix = null; for (Column column : sc.getColumns()) String name = PropertyAccessorFactory.STRING.fromBytes(String.class, column.getName()); byte[] value = column.getValue(); if (value == null) embeddedObject = superColumnClass.newInstance(); for (Column column : sc.getColumns()) String name = PropertyAccessorFactory.STRING.fromBytes(String.class, column.getName()); byte[] value = column.getValue();
/**
 * Returns the value of the requested Thrift field, boxing the primitive
 * timestamp and TTL accessors.
 *
 * @param field which field to read
 * @return the field's current value
 * @throws IllegalStateException if {@code field} is not a known field
 */
public Object getFieldValue(_Fields field) {
    switch (field) {
        case NAME:
            return getName();
        case VALUE:
            return getValue();
        case TIMESTAMP:
            return Long.valueOf(getTimestamp());
        case TTL:
            return Integer.valueOf(getTtl());
        default:
            throw new IllegalStateException();
    }
}
// Fragment (closing braces cut off): copies each non-null column of a row into
// the `entity` map, keyed by the decoded column name; null column values map
// to null, others are decoded via the CQL result schema.
// NOTE(review): `entity`, `cqlResult` and composeColumnValue(...) live outside
// this fragment — verify against the full method.
for (Column column : row.getColumns()) { if (column != null) { String thriftColumnName = PropertyAccessorFactory.STRING.fromBytes(String.class, column.getName()); if (column.getValue() == null) { entity.put(thriftColumnName, null); } else { entity.put(thriftColumnName, composeColumnValue(cqlResult.getSchema(), column.getValue(), column.getName()));
/**
 * Performs a deep copy on <i>other</i>.
 *
 * <p>Appears to be a Thrift union-style holder: only the members reported set
 * on {@code other} are copied, each through its own copy constructor, so the
 * new instance shares no mutable state with the original. Members left unset
 * on {@code other} remain null here.
 *
 * @param other the instance to deep-copy
 */
public ColumnOrSuperColumn(ColumnOrSuperColumn other) {
  if (other.isSetColumn()) {
    this.column = new Column(other.column);
  }
  if (other.isSetSuper_column()) {
    this.super_column = new SuperColumn(other.super_column);
  }
  if (other.isSetCounter_column()) {
    this.counter_column = new CounterColumn(other.counter_column);
  }
  if (other.isSetCounter_super_column()) {
    this.counter_super_column = new CounterSuperColumn(other.counter_super_column);
  }
}
/**
 * Updates this column in place with a new value, clock and TTL, returning
 * {@code this} so calls can be chained.
 */
@Override
public HColumn<N, V> apply(V value, long clock, int ttl) {
    column.setTtl(ttl);
    column.setTimestamp(clock);
    setValue(value);
    return this;
}