private void runQuery(K start, K end) { query.setKeys(start, end); if(!firstRun) { query.setRowCount(rowCount); } rowsIterator = null; QueryResult<OrderedRows<K, String, String>> result = query.execute(); OrderedRows<K, String, String> rows = (result != null) ? result.get() : null; rowsIterator = (rows != null) ? rows.iterator() : null; // we'll skip this first one, since it is the same as the last one from previous time we executed if (!firstRun && rowsIterator != null) rowsIterator.next(); firstRun = false; if (rowsIterator != null && !rowsIterator.hasNext()) { nextValue = null; // all done. our iterator's hasNext() will now return false; } else { findNext(true); } }
private void refresh() { query.setKeys(startKey, endKey); iterator = Iterators.peekingIterator(query.execute().get().getList().iterator()); rows = 0; if (iterator.hasNext()) { // First element is startKey which was the last element on the previous query result - skip it next(); } } }
@Override public boolean hasNext() { if (iterator == null) { // First time through iterator = Iterators.peekingIterator(query.execute().get().getList().iterator()); } else if (!iterator.hasNext() && rows == query.getRowCount()) { // only need to do another query if maximum rows were retrieved query.setKeys(startKey, endKey); iterator = Iterators.peekingIterator(query.execute().get().getList().iterator()); rows = 0; if (iterator.hasNext()) { // First element is startKey which was the last element on the previous query result - skip it next(); } } while(filter != null && iterator != null && iterator.hasNext() && !filter.accept(iterator.peek())) { next(); if(!iterator.hasNext() && rows == query.getRowCount()) { refresh(); } } return iterator.hasNext(); }
/**
 * Verifies that setReturnKeysOnly() yields rows with keys but empty column
 * slices, and that subsequently naming explicit columns brings data back.
 */
@Test
public void testKeysOnlyPredicate() {
    RangeSlicesQuery<String, String, Long> query =
        HFactory.createRangeSlicesQuery(keyspace, se, se, le);
    QueryResult<OrderedRows<String, String, Long>> result =
        query.setColumnFamily(cf).setKeys("", "").setReturnKeysOnly().execute();

    Row<String, String, Long> firstRow = result.get().iterator().next();
    assertNotNull(firstRow.getKey());
    // Keys-only: the slice must carry no column data.
    assertEquals(0, firstRow.getColumnSlice().getColumns().size());

    // Naming columns explicitly overrides keys-only and returns their values.
    result = query.setColumnNames("birthyear", "birthmonth").setRowCount(5).execute();
    firstRow = result.get().iterator().next();
    assertNotNull(firstRow.getKey());
    assertEquals(2, firstRow.getColumnSlice().getColumns().size());
}
/**
 * Verifies that an equality predicate on the indexed "birthyear" column,
 * combined with keys-only retrieval, matches exactly the four seeded rows.
 */
@Test
public void testInsertGetRemove() {
    RangeSlicesQuery<String, String, Long> query =
        HFactory.createRangeSlicesQuery(keyspace, se, se, le);
    query.setColumnFamily(cf);
    query.setKeys("", "");
    query.setColumnNames("birthmonth");
    query.setReturnKeysOnly();
    query.addEqualsExpression("birthyear", 1975L);

    QueryResult<OrderedRows<String, String, Long>> result = query.execute();
    assertEquals(4, result.get().getList().size());
}
private void runQuery(K start, K end) { query.setKeys(start, end); rowsIterator = null; QueryResult<OrderedRows<K, String, String>> result = query.execute(); OrderedRows<K, String, String> rows = (result != null) ? result.get() : null; rowsIterator = (rows != null) ? rows.iterator() : null; // we'll skip this first one, since it is the same as the last one from previous time we executed if (!firstRun && rowsIterator != null) rowsIterator.next(); firstRun = false; if (!rowsIterator.hasNext()) { nextValue = null; // all done. our iterator's hasNext() will now return false; } else { findNext(true); } }
/**
 * Re-executes the range query from the current {@code startKey}/{@code endKey}
 * and resets the per-page row counter. Assumes startKey has been advanced to
 * the last key consumed from the previous page — TODO confirm against next().
 */
private void refresh() {
    query.setKeys(startKey, endKey);
    iterator = Iterators.peekingIterator(query.execute().get().getList().iterator());
    rows = 0;
    if (iterator.hasNext()) {
        // First element is startKey which was the last element on the previous query result - skip it
        next();
    }
}
}
/**
 * Fetches up to five rows from the "Npanxx" column family for keys in the
 * range 512202..512205, returning the city/state/lat/lng columns of each.
 */
@Override
public QueryResult<OrderedRows<String, String, String>> execute() {
    RangeSlicesQuery<String, String, String> query = HFactory.createRangeSlicesQuery(
        keyspace, stringSerializer, stringSerializer, stringSerializer);
    return query
        .setColumnFamily("Npanxx")
        .setColumnNames("city", "state", "lat", "lng")
        .setKeys("512202", "512205")
        .setRowCount(5)
        .execute();
}
/**
 * Reverse lookup: returns the key of the first row whose COLUMN_NAME column
 * equals {@code value}, or {@code null} when no row matches.
 *
 * @param value the column value to match (indexed equality expression)
 * @return the matching key, or {@code null} if none
 * @throws DataAccessLayerException on data-access failure
 */
@Override
public K getKey(final V value) throws DataAccessLayerException {
    RangeSlicesQuery<K, byte[], V> rq = HFactory.createRangeSlicesQuery(
        _keyspace, _serializer_k, BYTE_SERIALIZER, _serializer_v);
    // Only one matching key is needed, so cap the result at a single row.
    rq.addEqualsExpression(COLUMN_NAME, value).setReturnKeysOnly()
        .setColumnFamily(_cf_name)
        .setColumnNames(COLUMN_NAME).setRowCount(1);
    final List<Row<K, byte[], V>> rows = rq.execute().get().getList();
    // NOTE(review): getKey() already returns K via _serializer_k, yet the key is
    // cast to byte[] and deserialized again — this only works if K is byte[] (or
    // keys come back raw). Verify against how this DAO is parameterized.
    return rows.isEmpty() ? null : _serializer_k.fromBytes((byte[])rows.get(0).getKey());
}
/**
 * Reverse lookup: returns the key of the first row whose COLUMN_NAME column
 * equals {@code value}, or {@code null} when no row matches.
 *
 * @param value the column value to match (indexed equality expression)
 * @return the matching key, or {@code null} if none
 * @throws DataAccessLayerException on data-access failure
 */
@Override
public K getKey(final V value) throws DataAccessLayerException {
    RangeSlicesQuery<K, byte[], V> rq = HFactory.createRangeSlicesQuery(
        _keyspace, _serializer_k, BYTE_SERIALIZER, _serializer_v);
    // Only one matching key is needed, so cap the result at a single row.
    rq.addEqualsExpression(COLUMN_NAME, value).setReturnKeysOnly()
        .setColumnFamily(_cf_name)
        .setColumnNames(COLUMN_NAME).setRowCount(1);
    final List<Row<K, byte[], V>> rows = rq.execute().get().getList();
    // NOTE(review): getKey() already returns K via _serializer_k, yet the key is
    // cast to byte[] and deserialized again — this only works if K is byte[] (or
    // keys come back raw). Verify against how this DAO is parameterized.
    return rows.isEmpty() ? null : _serializer_k.fromBytes((byte[])rows.get(0).getKey());
}
@Override public List<String> getMetadataFacets( final String repositoryId, final String facetId ) throws MetadataRepositoryException { QueryResult<OrderedRows<String, String, String>> result = HFactory // .createRangeSlicesQuery( keyspace, ss, ss, ss ) // .setColumnFamily( cassandraArchivaManager.getMetadataFacetFamilyName() ) // .setColumnNames( NAME.toString() ) // .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) // .addEqualsExpression( FACET_ID.toString(), facetId ) // .execute(); final List<String> facets = new ArrayList<>(); for ( Row<String, String, String> row : result.get() ) { facets.add( getStringValue( row.getColumnSlice(), NAME.toString() ) ); } return facets; }
protected List<String> getNamespaces( final String repoId ) throws MetadataResolutionException { QueryResult<OrderedRows<String, String, String>> result = HFactory // .createRangeSlicesQuery( keyspace, ss, ss, ss ) // .setColumnFamily( cassandraArchivaManager.getNamespaceFamilyName() ) // .setColumnNames( NAME.toString() ) // .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) // .execute(); List<String> namespaces = new ArrayList<>( result.get().getCount() ); for ( Row<String, String, String> row : result.get() ) { namespaces.add( getStringValue( row.getColumnSlice(), NAME.toString() ) ); } return namespaces; }
@Override public Collection<String> getProjects( final String repoId, final String namespace ) throws MetadataResolutionException { QueryResult<OrderedRows<String, String, String>> result = HFactory // .createRangeSlicesQuery( keyspace, ss, ss, ss ) // .setColumnFamily( cassandraArchivaManager.getProjectFamilyName() ) // .setColumnNames( PROJECT_ID.toString() ) // .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) // .addEqualsExpression( NAMESPACE_ID.toString(), namespace ) // .execute(); final Set<String> projects = new HashSet<String>( result.get().getCount() ); for ( Row<String, String, String> row : result.get() ) { projects.add( getStringValue( row.getColumnSlice(), PROJECT_ID.toString() ) ); } return projects; }
protected void removeLicenses( String projectVersionMetadataKey ) { QueryResult<OrderedRows<String, String, String>> result = HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) // .setColumnFamily( cassandraArchivaManager.getLicenseFamilyName() ) // .setColumnNames( NAME.toString() ) // .setRowCount( Integer.MAX_VALUE ) // .addEqualsExpression( "projectVersionMetadataModel.key", projectVersionMetadataKey ) // .execute(); for ( Row<String, String, String> row : result.get() ) { this.licenseTemplate.deleteRow( row.getKey() ); } }
protected void removeDependencies( String projectVersionMetadataKey ) { QueryResult<OrderedRows<String, String, String>> result = HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) // .setColumnFamily( cassandraArchivaManager.getDependencyFamilyName() ) // .setColumnNames( GROUP_ID.toString() ) // .setRowCount( Integer.MAX_VALUE ) // .addEqualsExpression( "projectVersionMetadataModel.key", projectVersionMetadataKey ) // .execute(); for ( Row<String, String, String> row : result.get() ) { this.dependencyTemplate.deleteRow( row.getKey() ); } }
protected Repository getRepository( String repositoryId ) throws MetadataRepositoryException { QueryResult<OrderedRows<String, String, String>> result = HFactory // .createRangeSlicesQuery( keyspace, StringSerializer.get(), StringSerializer.get(), StringSerializer.get() ) // .setColumnFamily( cassandraArchivaManager.getRepositoryFamilyName() ) // .setColumnNames( REPOSITORY_NAME.toString() ) // .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) // .execute(); return ( result.get().getCount() > 0 ) ? new Repository( repositoryId ) : null; }
@Override public void removeMetadataFacets( final String repositoryId, final String facetId ) throws MetadataRepositoryException { QueryResult<OrderedRows<String, String, String>> result = HFactory // .createRangeSlicesQuery( keyspace, ss, ss, ss ) // .setColumnFamily( cassandraArchivaManager.getMetadataFacetFamilyName() ) // .setColumnNames( KEY.toString(), VALUE.toString() ) // .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) // .addEqualsExpression( FACET_ID.toString(), facetId ) // .execute(); for ( Row<String, String, String> row : result.get() ) { this.metadataFacetTemplate.deleteRow( row.getKey() ); } }
protected Namespace getNamespace( String repositoryId, String namespaceId ) { QueryResult<OrderedRows<String, String, String>> result = HFactory // .createRangeSlicesQuery( keyspace, ss, ss, ss ) // .setColumnFamily( cassandraArchivaManager.getNamespaceFamilyName() ) // .setColumnNames( REPOSITORY_NAME.toString(), NAME.toString() ) // .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) // .addEqualsExpression( NAME.toString(), namespaceId ) // .execute(); if ( result.get().getCount() > 0 ) { ColumnSlice<String, String> columnSlice = result.get().getList().get( 0 ).getColumnSlice(); return new Namespace( getStringValue( columnSlice, NAME.toString() ), // new Repository( getStringValue( columnSlice, REPOSITORY_NAME.toString() ) ) ); } return null; }
@Override public void removeMetadataFacet( final String repositoryId, final String facetId, final String name ) throws MetadataRepositoryException { QueryResult<OrderedRows<String, String, String>> result = HFactory // .createRangeSlicesQuery( keyspace, ss, ss, ss ) // .setColumnFamily( cassandraArchivaManager.getMetadataFacetFamilyName() ) // .setColumnNames( KEY.toString(), VALUE.toString() ) // .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) // .addEqualsExpression( FACET_ID.toString(), facetId ) // .addEqualsExpression( NAME.toString(), name ) // .execute(); for ( Row<String, String, String> row : result.get() ) { this.metadataFacetTemplate.deleteRow( row.getKey() ); } }