private static Object deserializeMapEntryKey(ByteBuffer bb){ List<Object> stuff = new ArrayList<>(); while(bb.hasRemaining()){ ByteBuffer data = CQLUtils.getWithShortLength(bb); if(stuff.size() == 0){ stuff.add(DataType.uuid().deserialize(data.slice(), ProtocolVersion.NEWEST_SUPPORTED)); }else{ stuff.add(DataType.text().deserialize(data.slice(), ProtocolVersion.NEWEST_SUPPORTED)); } byte equality = bb.get(); // we don't use this but take the equality byte off the buffer } return stuff; }
@Override protected List<Object> deserializePartitionKey(ByteBuffer bb){ /** * List<Object> keys = new ArrayList<>(6); keys.add(0, appUUID); // UUID keys.add(1, applicationType); // String keys.add(2, entityType); // String keys.add(3, fieldType); // String keys.add(4, fieldName); // String keys.add(5, fieldValueString); // String */ List<Object> stuff = new ArrayList<>(); while(bb.hasRemaining()){ ByteBuffer data = CQLUtils.getWithShortLength(bb); if(stuff.size() == 0){ stuff.add(DataType.uuid().deserialize(data.slice(), ProtocolVersion.NEWEST_SUPPORTED)); }else{ stuff.add(DataType.text().deserialize(data.slice(), ProtocolVersion.NEWEST_SUPPORTED)); } byte equality = bb.get(); // we don't use this but take the equality byte off the buffer } return stuff; }
@Override protected List<Object> deserializePartitionKey(ByteBuffer bb){ /** * List<Object> keys = new ArrayList<>(8); keys.add(0, appUUID); keys.add(1, applicationType); keys.add(2, appUUID); keys.add(3, applicationType); keys.add(4, entityType); keys.add(5, fieldType); keys.add(6, fieldName); keys.add(7, fieldValueString); */ int count = 0; List<Object> stuff = new ArrayList<>(); while(bb.hasRemaining()){ ByteBuffer data = CQLUtils.getWithShortLength(bb); if(count == 0 || count == 2){ stuff.add(DataType.uuid().deserialize(data.slice(), ProtocolVersion.NEWEST_SUPPORTED)); }else{ stuff.add(DataType.text().deserialize(data.slice(), ProtocolVersion.NEWEST_SUPPORTED)); } byte equality = bb.get(); // we don't use this but take the equality byte off the buffer count++; } return stuff; }
/**
 * Looks up the UUID stored under the given key within the map scope.
 *
 * @param scope the map scope to read from
 * @param key   the entry key
 * @return the stored UUID, or null when no value exists for the key
 */
@Override
public UUID getUuid(final MapScope scope, final String key) {
    final ByteBuffer raw = getValueCQL(scope, key, cassandraConfig.getDataStaxReadCl());
    if (raw == null) {
        return null;
    }
    return (UUID) DataType.uuid().deserialize(raw, ProtocolVersion.NEWEST_SUPPORTED);
}
protected static String getPrimitiveAvroTypeFromCassandraType(DataType dataType) { // Map types from Cassandra to Avro where possible if (dataType.equals(DataType.ascii()) || dataType.equals(DataType.text()) || dataType.equals(DataType.varchar()) // Nonstandard types represented by this processor as a string || dataType.equals(DataType.timestamp()) || dataType.equals(DataType.timeuuid()) || dataType.equals(DataType.uuid()) || dataType.equals(DataType.inet()) || dataType.equals(DataType.varint())) { return "string"; } else if (dataType.equals(DataType.cboolean())) { return "boolean"; } else if (dataType.equals(DataType.cint())) { return "int"; } else if (dataType.equals(DataType.bigint()) || dataType.equals(DataType.counter())) { return "long"; } else if (dataType.equals(DataType.cfloat())) { return "float"; } else if (dataType.equals(DataType.cdouble())) { return "double"; } else if (dataType.equals(DataType.blob())) { return "bytes"; } else { throw new IllegalArgumentException("createSchema: Unknown Cassandra data type " + dataType.getName() + " cannot be converted to Avro type"); } }
/**
 * Loads all known properties of a token in a single query and decodes each
 * column value according to the property it stores.
 *
 * @param tokenUUID the token's identifier; must not be null
 * @return map of property name to decoded value
 * @throws RuntimeException when a fetched property cannot be deserialized
 */
@Override
public Map<String, Object> getTokenInfo(UUID tokenUUID) {
    Preconditions.checkNotNull(tokenUUID, "token UUID is required");

    // Serialize every known property name for use in the IN clause below.
    final List<ByteBuffer> serializedProps = new ArrayList<>();
    TOKEN_PROPERTIES.forEach(propName ->
        serializedProps.add(DataType.serializeValue(propName, ProtocolVersion.NEWEST_SUPPORTED)));

    final ByteBuffer rowKey = DataType.uuid().serialize(tokenUUID, ProtocolVersion.NEWEST_SUPPORTED);

    // SELECT * FROM tokens WHERE key = <tokenUUID> AND column1 IN (<properties>)
    final Statement query = QueryBuilder.select().all().from(TOKENS_TABLE)
        .where(QueryBuilder.eq("key", rowKey))
        .and(QueryBuilder.in("column1", serializedProps))
        .setConsistencyLevel(cassandraConfig.getDataStaxReadCl());

    final Map<String, Object> tokenInfo = new HashMap<>();
    for (Row row : session.execute(query).all()) {
        final String propName = (String) DataType.text()
            .deserialize(row.getBytes("column1"), ProtocolVersion.NEWEST_SUPPORTED);
        final Object propValue = deserializeColumnValue(propName, row.getBytes("value"));
        if (propValue == null) {
            throw new RuntimeException("error deserializing token info for property: " + propName);
        }
        tokenInfo.put(propName, propValue);
    }

    logger.trace("getTokenInfo, info: {}", tokenInfo);

    return tokenInfo;
}
/**
 * Deletes the given token rows and the principal's token-index row in one batch.
 *
 * @param tokenUUIDs         the tokens to delete; must not be null
 * @param principalKeyBuffer serialized key of the principal owning the tokens; must not be null
 */
@Override
public void deleteTokens(final List<UUID> tokenUUIDs, final ByteBuffer principalKeyBuffer) {
    Preconditions.checkNotNull(tokenUUIDs, "token UUID list is required");
    // Fixed copy-paste bug: this precondition previously re-checked tokenUUIDs,
    // leaving principalKeyBuffer unvalidated despite the error message.
    Preconditions.checkNotNull(principalKeyBuffer, "principalKeyBuffer is required");

    logger.trace("deleteTokens, token UUIDs: {}", tokenUUIDs);

    final BatchStatement batchStatement = new BatchStatement();

    // One delete per token row.
    tokenUUIDs.forEach(tokenUUID ->
        batchStatement.add(
            QueryBuilder.delete()
                .from(TOKENS_TABLE)
                .where(QueryBuilder.eq("key",
                    DataType.uuid().serialize(tokenUUID, ProtocolVersion.NEWEST_SUPPORTED)))));

    // Remove the principal's token-index row as well.
    batchStatement.add(
        QueryBuilder.delete()
            .from(PRINCIPAL_TOKENS_TABLE)
            .where(QueryBuilder.eq("key", principalKeyBuffer)));

    session.execute(batchStatement);
}
/**
 * Revokes a single token: deletes its row and, when the principal key is
 * known, the matching entry in the principal-to-token index.
 *
 * @param tokenUUID          the token to revoke; must not be null
 * @param principalKeyBuffer serialized principal key, or null when unknown
 */
@Override
public void revokeToken(final UUID tokenUUID, final ByteBuffer principalKeyBuffer) {
    Preconditions.checkNotNull(tokenUUID, "token UUID is required");

    logger.trace("revokeToken, token UUID: {}", tokenUUID);

    // Serialize once; the same bytes are used for both deletes.
    final ByteBuffer serializedTokenUUID =
        DataType.uuid().serialize(tokenUUID, ProtocolVersion.NEWEST_SUPPORTED);

    final BatchStatement batch = new BatchStatement();

    // Always remove the token row itself.
    batch.add(
        QueryBuilder.delete()
            .from(TOKENS_TABLE)
            .where(QueryBuilder.eq("key", serializedTokenUUID)));

    // Remove the principal-index entry only when the caller supplied the principal key.
    if (principalKeyBuffer != null) {
        batch.add(
            QueryBuilder.delete()
                .from(PRINCIPAL_TOKENS_TABLE)
                .where(QueryBuilder.eq("key", principalKeyBuffer))
                .and(QueryBuilder.eq("column1", serializedTokenUUID)));
    }

    session.execute(batch);
}
/**
 * Decodes a token column value according to the property it stores.
 *
 * @param name the token property name (one of the TOKEN_* constants)
 * @param bb   the raw column bytes
 * @return the decoded value, or null for an unrecognized property name
 */
private Object deserializeColumnValue(final String name, final ByteBuffer bb) {
    switch (name) {
        // String-valued properties.
        case TOKEN_TYPE:
        case TOKEN_PRINCIPAL_TYPE:
            return DataType.text().deserialize(bb, ProtocolVersion.NEWEST_SUPPORTED);
        // 64-bit integer properties (timestamps and durations).
        case TOKEN_CREATED:
        case TOKEN_ACCESSED:
        case TOKEN_INACTIVE:
        case TOKEN_DURATION:
            return DataType.bigint().deserialize(bb, ProtocolVersion.NEWEST_SUPPORTED);
        // UUID-valued properties.
        case TOKEN_ENTITY:
        case TOKEN_APPLICATION:
        case TOKEN_WORKFLOW_ORG_ID:
        case TOKEN_UUID:
            return DataType.uuid().deserialize(bb, ProtocolVersion.NEWEST_SUPPORTED);
        // Opaque serialized state.
        case TOKEN_STATE:
            return fromByteBuffer(bb, Object.class);
        default:
            // Unknown property names cannot be decoded.
            return null;
    }
}
|| mainType.equals(DataType.varchar()) || mainType.equals(DataType.timeuuid()) || mainType.equals(DataType.uuid()) || mainType.equals(DataType.inet()) || mainType.equals(DataType.varint())) {
/**
 * Stores a UUID value under the given key within the map scope, writing the
 * entry row and its key-index row in a single batch.
 *
 * @param scope   the map scope to write into; must not be null
 * @param key     the entry key; must not be null
 * @param putUuid the UUID value to store; must not be null
 */
@Override
public void putUuid(final MapScope scope, final String key, final UUID putUuid) {
    Preconditions.checkNotNull(scope, "mapscope is required");
    Preconditions.checkNotNull(key, "key is required");
    Preconditions.checkNotNull(putUuid, "value is required");

    final BatchStatement batch = new BatchStatement();

    // Entry row: (partition key, boolean marker column) -> serialized UUID.
    batch.add(
        QueryBuilder.insertInto(MAP_ENTRIES_TABLE)
            .value("key", getMapEntryPartitionKey(scope, key))
            .value("column1", DataType.cboolean().serialize(true, ProtocolVersion.NEWEST_SUPPORTED))
            .value("value", DataType.uuid().serialize(putUuid, ProtocolVersion.NEWEST_SUPPORTED)));

    // Key-index row in the current bucket; value column is intentionally empty
    // (only the key's presence matters here).
    final int bucket = BUCKET_LOCATOR.getCurrentBucket(scope.getName());
    batch.add(
        QueryBuilder.insertInto(MAP_KEYS_TABLE)
            .value("key", getMapKeyPartitionKey(scope, bucket))
            .value("column1", DataType.text().serialize(key, ProtocolVersion.NEWEST_SUPPORTED))
            .value("value", DataType.serializeValue(null, ProtocolVersion.NEWEST_SUPPORTED)));

    session.execute(batch);
}
// Private no-arg constructor: registers this codec for the CQL uuid type.
// NOTE(review): private ctor suggests instances come from a shared singleton or
// codec registry — the enclosing class is not fully visible here; confirm.
private UUIDCodec() { super(DataType.uuid()); } }
QueryBuilder.eq("key", DataType.uuid().serialize(tokenUUID, ProtocolVersion.NEWEST_SUPPORTED)); final Clause whereTokenAccessed = QueryBuilder.eq("column1", DataType.serializeValue(TOKEN_ACCESSED, ProtocolVersion.NEWEST_SUPPORTED));
/**
 * Builds the CQL DDL (CREATE TABLE IF NOT EXISTS) for the workflows table:
 * partitioned by (workflow id, shard id), clustered by (entity, task id),
 * with static columns for per-partition task/partition totals.
 *
 * @return the CQL statement as a string
 */
private String getCreateWorkflowsTableStatement() {
    return SchemaBuilder.createTable(config.getCassandraKeyspace(), TABLE_WORKFLOWS)
        .ifNotExists()
        .addPartitionKey(WORKFLOW_ID_KEY, DataType.uuid())
        .addPartitionKey(SHARD_ID_KEY, DataType.cint())
        .addClusteringColumn(ENTITY_KEY, DataType.text())
        .addClusteringColumn(TASK_ID_KEY, DataType.text())
        .addColumn(PAYLOAD_KEY, DataType.text())
        // Static columns are shared by all rows in a partition.
        .addStaticColumn(TOTAL_TASKS_KEY, DataType.cint())
        .addStaticColumn(TOTAL_PARTITIONS_KEY, DataType.cint())
        .getQueryString();
}
/** A blank column name passed to clusteringOrder must be rejected. */
@Test(groups = "unit", expectedExceptions = IllegalArgumentException.class)
public void should_fail_when_blank_clustering_order_column_provided() throws Exception {
    createTable("test")
        .addPartitionKey("id", DataType.bigint())
        .addClusteringColumn("col1", DataType.uuid())
        .addClusteringColumn("col2", DataType.uuid())
        .addColumn("name", DataType.text())
        .withOptions()
        // Empty column name -> IllegalArgumentException expected.
        .clusteringOrder("", Direction.DESC);
}
/** clusteringOrder must only accept columns declared as clustering keys. */
@Test(groups = "unit", expectedExceptions = IllegalArgumentException.class)
public void should_fail_when_clustering_order_column_does_not_match_declared_clustering_keys() throws Exception {
    createTable("test")
        .addPartitionKey("id", DataType.bigint())
        .addClusteringColumn("col1", DataType.uuid())
        .addClusteringColumn("col2", DataType.uuid())
        .addColumn("name", DataType.text())
        .withOptions()
        // "col3" was never declared as a clustering column -> IllegalArgumentException expected.
        .clusteringOrder("col3", Direction.ASC);
}
/** Regular column names that are CQL reserved keywords must be rejected. */
@Test(
    groups = "unit",
    expectedExceptions = IllegalArgumentException.class,
    expectedExceptionsMessageRegExp = "The column name 'ADD' is not allowed because it is a reserved keyword")
public void should_fail_if_simple_column_is_a_reserved_keyword() throws Exception {
    createTable("test")
        .addPartitionKey("pk", DataType.bigint())
        .addClusteringColumn("cluster", DataType.uuid())
        // "ADD" is a CQL reserved keyword -> IllegalArgumentException expected.
        .addColumn("ADD", DataType.text())
        .getQueryString();
}
/** Clustering column names that are CQL reserved keywords must be rejected. */
@Test(
    groups = "unit",
    expectedExceptions = IllegalArgumentException.class,
    expectedExceptionsMessageRegExp = "The clustering column name 'ADD' is not allowed because it is a reserved keyword")
public void should_fail_if_clustering_key_is_a_reserved_keyword() throws Exception {
    createTable("test")
        .addPartitionKey("pk", DataType.bigint())
        // "ADD" is a CQL reserved keyword -> IllegalArgumentException expected.
        .addClusteringColumn("ADD", DataType.uuid())
        .addColumn("col", DataType.text())
        .getQueryString();
}
/** Static column names that are CQL reserved keywords must be rejected. */
@Test(
    groups = "unit",
    expectedExceptions = IllegalArgumentException.class,
    expectedExceptionsMessageRegExp = "The static column name 'ADD' is not allowed because it is a reserved keyword")
public void should_fail_if_static_column_is_a_reserved_keyword() throws Exception {
    createTable("test")
        .addPartitionKey("pk", DataType.bigint())
        .addClusteringColumn("cluster", DataType.uuid())
        // "ADD" is a CQL reserved keyword -> IllegalArgumentException expected.
        .addStaticColumn("ADD", DataType.text())
        .addColumn("col", DataType.text())
        .getQueryString();
}