/**
 * Serializes every element of the given list into a {@link ByteBuffer} using
 * {@code bytebuffer}.
 *
 * @param l the objects to serialize (must not be null)
 * @return a new list of serialized buffers, in the same order as the input
 */
public static List<ByteBuffer> bytebuffers( List<?> l ) {
    final List<ByteBuffer> serialized = new ArrayList<ByteBuffer>( l.size() );
    for ( Object element : l ) {
        serialized.add( bytebuffer( element ) );
    }
    return serialized;
}
/**
 * Queues a column deletion on the given mutator, logging the operation first.
 * A {@link List} column name is flattened into a DynamicComposite byte buffer
 * before serialization.
 *
 * @param m            the batch mutator to add the deletion to
 * @param columnFamily column family (its {@code toString()} is used as the CF name)
 * @param key          row key, serialized via {@code bytebuffer}
 * @param columnName   column to delete; lists become composites
 * @param timestamp    deletion timestamp in microseconds
 */
public static void addDeleteToMutator( Mutator<ByteBuffer> m, Object columnFamily, Object key, Object columnName,
                                       long timestamp ) throws Exception {
    logBatchOperation( "Delete", columnFamily, key, columnName, null, timestamp );
    Object name = columnName;
    if ( name instanceof List<?> ) {
        name = DynamicComposite.toByteBuffer( ( List<?> ) name );
    }
    m.addDeletion( bytebuffer( key ), columnFamily.toString(), bytebuffer( name ), be, timestamp );
}
/**
 * Encode a message into a set of columns. JMS properties are encoded as strings
 * and longs; everything else is binary JSON.
 *
 * @param message the message to encode, may be null
 * @return a column-name-to-value map, or null when the message is null; a null
 *         property value maps to a null column value
 */
public static Map<ByteBuffer, ByteBuffer> serializeMessage( Message message ) {
    if ( message == null ) {
        return null;
    }
    Map<ByteBuffer, ByteBuffer> columns = new HashMap<ByteBuffer, ByteBuffer>();
    for ( Entry<String, Object> property : message.getProperties().entrySet() ) {
        String name = property.getKey();
        Object value = property.getValue();
        ByteBuffer nameBytes = bytebuffer( name );
        if ( value == null ) {
            columns.put( nameBytes, null );
        }
        else if ( MESSAGE_TYPE.equals( name ) || MESSAGE_ID.equals( name ) ) {
            // Type and id are stored natively rather than as JSON.
            columns.put( nameBytes, bytebuffer( value ) );
        }
        else {
            columns.put( nameBytes, JsonUtils.toByteBuffer( value ) );
        }
    }
    return columns;
}
/**
 * Encode a queue's properties into columns. Null values and the reserved
 * identity/cursor properties (queue id, newest, oldest) are skipped entirely.
 * Properties listed in {@code QUEUE_PROPERTIES} are stored natively; all other
 * values are stored as binary JSON.
 *
 * @param queue the queue to encode, may be null
 * @return a column-name-to-value map, or null when the queue is null
 */
public static Map<ByteBuffer, ByteBuffer> serializeQueue( Queue queue ) {
    if ( queue == null ) {
        return null;
    }
    Map<ByteBuffer, ByteBuffer> columns = new HashMap<ByteBuffer, ByteBuffer>();
    for ( Entry<String, Object> property : queue.getProperties().entrySet() ) {
        String name = property.getKey();
        Object value = property.getValue();
        boolean reserved = Queue.QUEUE_ID.equals( name ) || QUEUE_NEWEST.equals( name )
                || QUEUE_OLDEST.equals( name );
        if ( value == null || reserved ) {
            continue;
        }
        ByteBuffer valueBytes = QUEUE_PROPERTIES.containsKey( name )
                ? bytebuffer( value )
                : JsonUtils.toByteBuffer( value );
        columns.put( bytebuffer( name ), valueBytes );
    }
    return columns;
}
/**
 * Queues a column insertion on the given mutator, logging the operation first.
 * {@link List} column names or values are flattened into DynamicComposite byte
 * buffers before serialization.
 *
 * @param m            the batch mutator to add the insertion to
 * @param columnFamily column family (its {@code toString()} is used as the CF name)
 * @param key          row key, serialized via {@code bytebuffer}
 * @param columnName   column name; lists become composites
 * @param columnValue  column value; lists become composites
 * @param timestamp    write timestamp in microseconds
 */
public static void addInsertToMutator( Mutator<ByteBuffer> m, Object columnFamily, Object key, Object columnName,
                                       Object columnValue, long timestamp ) {
    logBatchOperation( "Insert", columnFamily, key, columnName, columnValue, timestamp );
    Object name = ( columnName instanceof List<?> )
            ? DynamicComposite.toByteBuffer( ( List<?> ) columnName ) : columnName;
    Object value = ( columnValue instanceof List<?> )
            ? DynamicComposite.toByteBuffer( ( List<?> ) columnValue ) : columnValue;
    HColumn<ByteBuffer, ByteBuffer> column =
            createColumn( bytebuffer( name ), bytebuffer( value ), timestamp, be, be );
    m.addInsertion( bytebuffer( key ), columnFamily.toString(), column );
}
/**
 * Converts an object to its storable binary form. After normalization via
 * {@code toStorableValue}, JSON trees are always JSON-serialized; when
 * {@code forceJson} is set, any non-null value that is not already a
 * {@link ByteBuffer} is JSON-serialized as well. Everything else goes through
 * {@code bytebuffer}.
 *
 * @param obj       the value to serialize
 * @param forceJson force JSON encoding for non-ByteBuffer values
 * @return the serialized value
 */
public static ByteBuffer toStorableBinaryValue( Object obj, boolean forceJson ) {
    Object storable = toStorableValue( obj );
    boolean asJson = ( storable instanceof JsonNode )
            || ( forceJson && storable != null && !( storable instanceof ByteBuffer ) );
    return asJson ? JsonUtils.toByteBuffer( storable ) : bytebuffer( storable );
}
/**
 * Removes a publisher/subscriber relationship from both sides of the
 * bidirectional index: the subscriber entry under the publisher's row in
 * QUEUE_SUBSCRIBERS, and the publisher entry under the subscriber's row in
 * QUEUE_SUBSCRIPTIONS.
 */
public void batchUnsubscribeFromQueue( Mutator<ByteBuffer> batch, String publisherQueuePath, UUID publisherQueueId,
                                       String subscriberQueuePath, UUID subscriberQueueId, long timestamp ) {
    ByteBuffer publisherKey = bytebuffer( publisherQueueId );
    ByteBuffer subscriberKey = bytebuffer( subscriberQueueId );
    // Drop the subscriber from the publisher's subscriber list...
    batch.addDeletion( publisherKey, QUEUE_SUBSCRIBERS.getColumnFamily(), subscriberQueuePath, se, timestamp );
    // ...and the publisher from the subscriber's subscription list.
    batch.addDeletion( subscriberKey, QUEUE_SUBSCRIPTIONS.getColumnFamily(), publisherQueuePath, se, timestamp );
}
/**
 * Delete an entire row from a column family.
 *
 * @param ko           the keyspace
 * @param columnFamily the column family (its {@code toString()} is used as the CF name)
 * @param key          the row key, serialized via {@code bytebuffer}
 *
 * @throws Exception the exception
 */
public void deleteRow( Keyspace ko, final Object columnFamily, final Object key ) throws Exception {
    if ( db_logger.isTraceEnabled() ) {
        db_logger.trace( "deleteRow cf={} key={}", columnFamily, key );
    }
    // Flushing mutator executes immediately; row deletion takes no column name.
    Mutator<ByteBuffer> mutator = CountingMutator.createFlushingMutator( ko, be );
    mutator.addDeletion( bytebuffer( key ), columnFamily.toString() ).execute();
}
/**
 * Parses a comma-separated index specification of the form
 * {@code "name[:validator],name[:validator],..."} into Thrift column
 * definitions with KEYS indexes. Entries with a blank name are skipped; a
 * missing validator defaults to {@code "UUIDType"}.
 *
 * @param indexes the specification string, may be null
 * @return column definitions, or null when the input is null
 */
public static List<ColumnDefinition> getIndexMetadata( String indexes ) {
    if ( indexes == null ) {
        return null;
    }
    List<ColumnDef> columnDefs = new ArrayList<ColumnDef>();
    for ( String entry : split( indexes, ',' ) ) {
        String columnName = stringOrSubstringBeforeFirst( entry, ':' ).trim();
        String validator = substringAfterLast( entry, ":" ).trim();
        if ( StringUtils.isBlank( validator ) ) {
            validator = "UUIDType"; // default comparator when none is specified
        }
        if ( StringUtils.isBlank( columnName ) ) {
            continue; // skip malformed entries without a column name
        }
        ColumnDef def = new ColumnDef( bytebuffer( columnName ), validator );
        def.setIndex_name( columnName );
        def.setIndex_type( IndexType.KEYS );
        columnDefs.add( def );
    }
    return ThriftColumnDef.fromThriftList( columnDefs );
}
/**
 * Writes a single column immediately using a flushing mutator. {@link List}
 * column names or values are flattened into DynamicComposite byte buffers.
 *
 * @param ko           the keyspace
 * @param columnFamily column family (its {@code toString()} is used as the CF name)
 * @param key          row key, serialized via {@code bytebuffer}
 * @param columnName   column name; lists become composites
 * @param columnValue  column value; lists become composites
 * @param ttl          time-to-live in seconds; 0 leaves the column without an explicit TTL
 */
public void setColumn( Keyspace ko, Object columnFamily, Object key, Object columnName, Object columnValue,
                       int ttl ) throws Exception {
    if ( db_logger.isTraceEnabled() ) {
        db_logger.trace( "setColumn cf={} key={} name={} value={}", columnFamily, key, columnName, columnValue );
    }
    ByteBuffer nameBytes = ( columnName instanceof List )
            ? DynamicComposite.toByteBuffer( ( List<?> ) columnName )
            : bytebuffer( columnName );
    ByteBuffer valueBytes = ( columnValue instanceof List )
            ? DynamicComposite.toByteBuffer( ( List<?> ) columnValue )
            : bytebuffer( columnValue );
    HColumn<ByteBuffer, ByteBuffer> column = createColumn( nameBytes, valueBytes, be, be );
    if ( ttl != 0 ) {
        column.setTtl( ttl );
    }
    Mutator<ByteBuffer> mutator = CountingMutator.createFlushingMutator( ko, be );
    mutator.insert( bytebuffer( key ), columnFamily.toString(), column );
}
/**
 * Adds a queue's serialized properties to the mutator. Columns with a non-empty
 * value are inserted; columns whose value is null or empty are deleted (the
 * property was cleared).
 *
 * @param m         the batch mutator
 * @param queue     the queue to persist, may be null (no-op)
 * @param timestamp write/delete timestamp in microseconds
 * @return the same mutator, for chaining
 */
public static Mutator<ByteBuffer> addQueueToMutator( Mutator<ByteBuffer> m, Queue queue, long timestamp ) {
    Map<ByteBuffer, ByteBuffer> columns = serializeQueue( queue );
    if ( columns == null ) {
        return m;
    }
    ByteBuffer rowKey = bytebuffer( queue.getUuid() );
    String cf = QueuesCF.QUEUE_PROPERTIES.toString();
    for ( Map.Entry<ByteBuffer, ByteBuffer> entry : columns.entrySet() ) {
        ByteBuffer value = entry.getValue();
        if ( value != null && value.hasRemaining() ) {
            m.addInsertion( rowKey, cf, createColumn( entry.getKey(), value, timestamp, be, be ) );
        }
        else {
            // Null/empty value means the property was cleared: delete the column.
            m.addDeletion( rowKey, cf, entry.getKey(), be, timestamp );
        }
    }
    return m;
}
/**
 * Adds a message's serialized properties to the mutator. Columns with a
 * non-empty value are inserted; columns whose value is null or empty are
 * deleted (the property was cleared).
 *
 * @param m         the batch mutator
 * @param message   the message to persist, may be null (no-op)
 * @param timestamp write/delete timestamp in microseconds
 * @return the same mutator, for chaining
 */
public static Mutator<ByteBuffer> addMessageToMutator( Mutator<ByteBuffer> m, Message message, long timestamp ) {
    Map<ByteBuffer, ByteBuffer> columns = serializeMessage( message );
    if ( columns == null ) {
        return m;
    }
    ByteBuffer rowKey = bytebuffer( message.getUuid() );
    String cf = QueuesCF.MESSAGE_PROPERTIES.toString();
    for ( Map.Entry<ByteBuffer, ByteBuffer> entry : columns.entrySet() ) {
        ByteBuffer value = entry.getValue();
        if ( value != null && value.hasRemaining() ) {
            m.addInsertion( rowKey, cf, createColumn( entry.getKey(), value, timestamp, be, be ) );
        }
        else {
            // Null/empty value means the property was cleared: delete the column.
            m.addDeletion( rowKey, cf, entry.getKey(), be, timestamp );
        }
    }
    return m;
}
/**
 * Records a publisher/subscriber relationship on both sides of the
 * bidirectional index: the subscriber (path -> id) under the publisher's row in
 * QUEUE_SUBSCRIBERS, and the publisher (path -> id) under the subscriber's row
 * in QUEUE_SUBSCRIPTIONS.
 */
public void batchSubscribeToQueue( Mutator<ByteBuffer> batch, String publisherQueuePath, UUID publisherQueueId,
                                   String subscriberQueuePath, UUID subscriberQueueId, long timestamp ) {
    // Record the subscriber under the publisher's row...
    batch.addInsertion( bytebuffer( publisherQueueId ), QUEUE_SUBSCRIBERS.getColumnFamily(),
            createColumn( subscriberQueuePath, subscriberQueueId, timestamp, se, ue ) );
    // ...and the publisher under the subscriber's row.
    batch.addInsertion( bytebuffer( subscriberQueueId ), QUEUE_SUBSCRIPTIONS.getColumnFamily(),
            createColumn( publisherQueuePath, publisherQueueId, timestamp, se, ue ) );
}
/**
 * Serializes an entity property value for storage. The UUID and type
 * properties are stored natively; everything else is stored as binary JSON,
 * encrypted afterwards when the schema marks the property as encrypted.
 *
 * @param entityType    the entity type, used for the encryption lookup
 * @param propertyName  the property being serialized
 * @param propertyValue the value to serialize
 * @return the serialized (and possibly encrypted) bytes
 */
public static ByteBuffer serializeEntityProperty( String entityType, String propertyName, Object propertyValue ) {
    if ( PROPERTY_UUID.equals( propertyName ) ) {
        return bytebuffer( uuid( propertyValue ) );
    }
    if ( PROPERTY_TYPE.equals( propertyName ) ) {
        return bytebuffer( string( propertyValue ) );
    }
    ByteBuffer bytes = Schema.serializePropertyValueToJsonBinary( toJsonNode( propertyValue ) );
    if ( Schema.getDefaultSchema().isPropertyEncrypted( entityType, propertyName ) ) {
        // Rewind before encrypting so the full serialized payload is read.
        bytes.rewind();
        bytes = encrypt( bytes );
    }
    return bytes;
}
/**
 * Adds this message's indexable property values to the batch.
 * <p>
 * For each valid indexable value, writes (1) a composite column
 * (type-code, value, message UUID) into the PROPERTY_INDEX column family under
 * a shard key derived from the queue id, shard timestamp, and property key, and
 * (2) the property key into the queue's DICTIONARY_MESSAGE_INDEXES dictionary.
 * The outer property key is also recorded in the dictionary once per entry.
 * No-op when {@code propertyEntryList} is null.
 *
 * @param batch     the batch mutator to append insertions to
 * @param queueId   the owning queue's id, used to build row keys
 * @param shard_ts  shard timestamp folded into the property-index row key
 * @param timestamp write timestamp in microseconds
 */
public void addToMutation( Mutator<ByteBuffer> batch, UUID queueId, long shard_ts, long timestamp ) {
    if ( propertyEntryList != null ) {
        for ( Entry<String, List<Entry<String, Object>>> property : propertyEntryList.entrySet() ) {
            for ( Map.Entry<String, Object> indexEntry : property.getValue() ) {
                if ( validIndexableValue( indexEntry.getValue() ) ) {
                    // Index column: composite name carries (value type code, value, message UUID),
                    // value itself is empty — the data lives entirely in the column name.
                    batch.addInsertion( bytebuffer( key( queueId, shard_ts, indexEntry.getKey() ) ),
                            PROPERTY_INDEX.getColumnFamily(),
                            createColumn( new DynamicComposite( indexValueCode( indexEntry.getValue() ),
                                    indexEntry.getValue(), message.getUuid() ), ByteBuffer.allocate( 0 ),
                                    timestamp, dce, be ) );
                    // Record the indexed property name in the queue's message-index dictionary.
                    batch.addInsertion( bytebuffer( key( queueId, DICTIONARY_MESSAGE_INDEXES ) ),
                            QUEUE_DICTIONARIES.getColumnFamily(),
                            createColumn( indexEntry.getKey(), ByteBuffer.allocate( 0 ), timestamp, se, be ) );
                }
            }
            // Also record the outer property key in the dictionary, once per property entry.
            batch.addInsertion( bytebuffer( key( queueId, DICTIONARY_MESSAGE_INDEXES ) ),
                    QUEUE_DICTIONARIES.getColumnFamily(),
                    createColumn( property.getKey(), ByteBuffer.allocate( 0 ), timestamp, se, be ) );
        }
    }
}
/**
 * Fetches a specific set of named columns from one row.
 *
 * @param ko              the keyspace
 * @param columnFamily    column family (its {@code toString()} is used as the CF name)
 * @param key             row key, serialized via {@code bytebuffer}
 * @param columns         the column names to fetch
 * @param nameSerializer  serializer for column names
 * @param valueSerializer serializer for column values
 * @return the resulting column slice
 */
public <N, V> ColumnSlice<N, V> getColumns( Keyspace ko, Object columnFamily, Object key, N[] columns,
                                            Serializer<N> nameSerializer, Serializer<V> valueSerializer )
        throws Exception {
    if ( db_logger.isTraceEnabled() ) {
        db_logger.trace( "getColumn cf={} key={} column={}", columnFamily, key, columns );
    }
    SliceQuery<ByteBuffer, N, V> query = HFactory.createSliceQuery( ko, be, nameSerializer, valueSerializer );
    QueryResult<ColumnSlice<N, V>> queryResult = query.setKey( bytebuffer( key ) )
                                                      .setColumnNames( columns )
                                                      .setColumnFamily( columnFamily.toString() )
                                                      .execute();
    ColumnSlice<N, V> slice = queryResult.get();
    if ( db_logger.isTraceEnabled() && ( slice == null ) ) {
        db_logger.trace( "getColumn returned null" );
    }
    return slice;
}
/**
 * Increments a named counter for a queue. The counter name is always recorded
 * in the queue's counter dictionary; the actual increment goes to the Cassandra
 * counter column family and/or the counter batcher depending on
 * {@code counterType} ("p" writes both paths).
 *
 * @return the same mutator, for chaining
 */
public Mutator<ByteBuffer> batchIncrementQueueCounter( Mutator<ByteBuffer> m, UUID queueId, String name, long value,
                                                       long timestamp, UUID applicationId ) {
    if ( logger.isTraceEnabled() ) {
        logger.trace( "BIQC: Incrementing property {} of queue {} by value {}", name, queueId, value );
    }
    // Track the counter name in the queue's counter dictionary (empty value).
    // NOTE(review): the key is serialized via toString() here, unlike most other
    // key(...) call sites in this file — confirm this is intentional.
    m.addInsertion( bytebuffer( key( queueId, DICTIONARY_COUNTERS ).toString() ),
            QueuesCF.QUEUE_DICTIONARIES.toString(),
            createColumn( name, ByteBuffer.allocate( 0 ), timestamp, se, be ) );
    boolean viaCounterColumn = "o".equals( counterType ) || "p".equals( counterType );
    boolean viaBatcher = "n".equals( counterType ) || "p".equals( counterType );
    if ( viaCounterColumn ) {
        HCounterColumn<String> counter = createCounterColumn( name, value );
        m.addCounter( bytebuffer( queueId ), QueuesCF.COUNTERS.toString(), counter );
    }
    if ( viaBatcher ) {
        PrefixedSerializer prefixed = new PrefixedSerializer( applicationId, ue, ue );
        batcher.add( new Count( QueuesCF.COUNTERS.toString(), prefixed.toByteBuffer( queueId ), name, value ) );
    }
    return m;
}
private void handleAggregateCounterRow( Mutator<ByteBuffer> m, String key, long column, long value, UUID applicationId ) { if ( logger.isTraceEnabled() ) { logger.trace( "HACR: aggregateRow for app {} with key {} column {} and value {}", applicationId, key, column, value ); } if ( "o".equals( counterType ) || "p".equals( counterType ) ) { if ( m != null ) { HCounterColumn<Long> c = createCounterColumn( column, value, le ); m.addCounter( bytebuffer( key ), APPLICATION_AGGREGATE_COUNTERS.toString(), c ); } } if ( "n".equals( counterType ) || "p".equals( counterType ) ) { // create and add Count PrefixedSerializer ps = new PrefixedSerializer( applicationId, ue, se ); batcher.add( new Count( APPLICATION_AGGREGATE_COUNTERS.toString(), ps.toByteBuffer( key ), column, value ) ); } }
/**
 * Increments a named counter for an entity. The counter name is recorded in the
 * entity's counter dictionary; the increment itself goes to the entity counter
 * column family and/or the counter batcher depending on {@code counterType}
 * ("p" writes both paths).
 *
 * @return the same mutator, for chaining
 */
private Mutator<ByteBuffer> batchIncrementEntityCounter( Mutator<ByteBuffer> m, UUID entityId, String name,
                                                         Long value, long timestamp, UUID applicationId ) {
    if ( logger.isTraceEnabled() ) {
        logger.trace( "BIEC: Incrementing property {} of entity {} by value {}", name, entityId, value );
    }
    // Track the counter name in the entity's counter dictionary.
    addInsertToMutator( m, ENTITY_DICTIONARIES, key( entityId, DICTIONARY_COUNTERS ), name, null, timestamp );
    boolean viaCounterColumn = "o".equals( counterType ) || "p".equals( counterType );
    boolean viaBatcher = "n".equals( counterType ) || "p".equals( counterType );
    if ( viaCounterColumn ) {
        m.addCounter( bytebuffer( entityId ), ENTITY_COUNTERS.toString(), createCounterColumn( name, value ) );
    }
    if ( viaBatcher ) {
        PrefixedSerializer prefixed = new PrefixedSerializer( applicationId, ue, ue );
        batcher.add( new Count( ENTITY_COUNTERS.toString(), prefixed.toByteBuffer( entityId ), name, value ) );
    }
    return m;
}
/**
 * Persists a queue's properties and maintains its created/modified columns in
 * a single batch. Property-index updates are best-effort: a failure there is
 * logged but does not abort the queue update.
 *
 * @param queuePath the path to assign to the queue
 * @param queue     the queue to persist
 * @return the same queue instance, with its path set
 */
@Override public Queue updateQueue( String queuePath, Queue queue ) {
    queue.setPath( queuePath );
    UUID timestampUuid = newTimeUUID();
    long timestamp = getTimestampInMicros( timestampUuid );
    Mutator<ByteBuffer> batch = CountingMutator.createFlushingMutator( cass.getApplicationKeyspace( applicationId ), be );
    // Serialize the queue's properties into insert/delete operations on the batch.
    addQueueToMutator( batch, queue, timestamp );
    try {
        batchUpdateQueuePropertiesIndexes( batch, queuePath, queue.getUuid(), queue.getProperties(), timestampUuid );
    }
    catch ( Exception e ) {
        // Best-effort: index maintenance failure does not abort the queue update.
        logger.error( "Unable to update queue", e );
    }
    // NOTE(review): QUEUE_CREATED is written with Cassandra timestamp
    // Long.MAX_VALUE - timestamp, which decreases over time — presumably so the
    // earliest write wins and "created" is never overwritten. Confirm intent.
    batch.addInsertion( bytebuffer( queue.getUuid() ), QUEUE_PROPERTIES.getColumnFamily(),
            createColumn( QUEUE_CREATED, timestamp / 1000, Long.MAX_VALUE - timestamp, se, le ) );
    // QUEUE_MODIFIED uses the normal timestamp, so the latest write wins.
    batch.addInsertion( bytebuffer( queue.getUuid() ), QUEUE_PROPERTIES.getColumnFamily(),
            createColumn( QUEUE_MODIFIED, timestamp / 1000, timestamp, se, le ) );
    batchExecute( batch, RETRY_COUNT );
    return queue;
}