Log.warn("Error parsing tdrive file: " + input.getPath(), e); return new CloseableIterator.Empty<>();
final SortedSet<MemoryMetadataEntry> typeStore = metadataStore.get(type); if (typeStore == null) { return new CloseableIterator.Empty<>();
(Aggregation<?, Object, Object>) aggregation.getValue(); if ((reader == null) || !reader.hasNext()) { return new CloseableIterator.Empty(); } else { while (reader.hasNext()) {
+ query.getExtendedId() + "' if statistic type is not provided"); return new CloseableIterator.Empty<>(); if (adapterId == null) { LOGGER.error("DataTypeAdapter does not exist for type '" + query.getTypeName() + "'"); return new CloseableIterator.Empty<>();
delete); if (reader == null) { return new CloseableIterator.Empty();
e); return new CloseableIterator.Empty<AvroSimpleFeatureCollection>();
/**
 * Opens a forward scan over the rows that fall within the given key range.
 *
 * @param range the key range; a null end means "scan to the end of the table", a null start
 *        means "scan from the beginning"
 * @return an iterator over the matching rows, or an empty iterator when the underlying
 *         RocksDB handle is unavailable
 */
public synchronized CloseableIterator<GeoWaveRow> iterator(final ByteArrayRange range) {
  final RocksDB db = getReadDb();
  if (db == null) {
    // store not open / not readable; nothing to iterate
    return new CloseableIterator.Empty<>();
  }
  ReadOptions readOptions = null;
  final RocksIterator rocksIt;
  if (range.getEnd() != null) {
    // bound the scan with an exclusive upper bound at the next prefix after the end key;
    // the ReadOptions (and its Slice) are handed to the row iterator so it can close them
    readOptions = new ReadOptions().setIterateUpperBound(new Slice(range.getEndAsNextPrefix()));
    rocksIt = db.newIterator(readOptions);
  } else {
    rocksIt = db.newIterator();
  }
  if (range.getStart() != null) {
    rocksIt.seek(range.getStart());
  } else {
    rocksIt.seekToFirst();
  }
  return new RocksDBRowIterator(
      readOptions,
      rocksIt,
      adapterId,
      partition,
      requiresTimestamp,
      visibilityEnabled);
}
}
/**
 * Looks up data-index rows for the given data IDs in a single batched read.
 *
 * @param dataIds the data IDs to fetch
 * @return a lazily-deserialized iterator of the rows (one per data ID, in input order); empty
 *         when the RocksDB handle is unavailable or the batched read fails
 */
public synchronized CloseableIterator<GeoWaveRow> dataIndexIterator(final byte[][] dataIds) {
  final RocksDB readDb = getReadDb();
  if (readDb == null) {
    // store not open / not readable
    return new CloseableIterator.Empty<>();
  }
  try {
    final List<byte[]> dataIdsList = Arrays.asList(dataIds);
    final Map<byte[], byte[]> dataIdxResults = readDb.multiGet(dataIdsList);
    // NOTE: multiGet keys its result map by the exact byte[] references passed in, so
    // looking up with the same `dataId` reference is safe despite byte[]'s identity
    // hashing; a value may be null when the key is absent.
    // Wrapper is typed to avoid the unchecked raw-type construction.
    return new CloseableIterator.Wrapper<>(
        dataIdsList.stream().map(
            dataId -> DataIndexUtils.deserializeDataIndexRow(
                dataId,
                adapterId,
                dataIdxResults.get(dataId),
                visibilityEnabled)).iterator());
  } catch (final RocksDBException e) {
    LOGGER.error("Unable to get values by data ID", e);
  }
  return new CloseableIterator.Empty<>();
}
}
/**
 * Drains the given iterator through the aggregation function and returns the single
 * aggregated result.
 *
 * <p>Synchronizes on the aggregation function because its result state is shared and
 * mutable across calls.
 *
 * @param it the input values to aggregate; may be null
 * @param aggregationFunction the aggregation to apply; its prior result is cleared first
 * @return a single-element iterator holding the aggregation result, or an empty iterator
 *         when the input is null or empty
 */
public static CloseableIterator<Object> aggregate(
    final CloseableIterator<Object> it,
    final Aggregation<?, ?, Object> aggregationFunction) {
  if (it != null) {
    // close the input even when it turns out to be empty — the original only closed it
    // after a non-empty drain, leaking the iterator's resources for empty inputs
    try {
      if (it.hasNext()) {
        synchronized (aggregationFunction) {
          aggregationFunction.clearResult();
          while (it.hasNext()) {
            final Object input = it.next();
            if (input != null) {
              // null entries are skipped rather than passed to the aggregation
              aggregationFunction.aggregate(input);
            }
          }
        }
        return new Wrapper<>(Iterators.singletonIterator(aggregationFunction.getResult()));
      }
    } finally {
      it.close();
    }
  }
  return new CloseableIterator.Empty<>();
}
/**
 * Opens a full forward scan over all rows in this table.
 *
 * @return an iterator over every row, or an empty iterator when the underlying RocksDB
 *         handle is unavailable
 */
public synchronized CloseableIterator<GeoWaveRow> iterator() {
  final RocksDB db = getReadDb();
  if (db == null) {
    // store not open / not readable; nothing to iterate
    return new CloseableIterator.Empty<>();
  }
  // full scans shouldn't churn the block cache; the row iterator takes ownership of
  // the ReadOptions and RocksIterator and closes them
  final ReadOptions readOptions = new ReadOptions().setFillCache(false);
  final RocksIterator rocksIt = db.newIterator(readOptions);
  rocksIt.seekToFirst();
  return new RocksDBRowIterator(
      readOptions,
      rocksIt,
      adapterId,
      partition,
      requiresTimestamp,
      visibilityEnabled);
}
@Override public CloseableIterator<AvroWholeFile> toAvroObjects(final URL f) { try { // TODO: consider a streaming mechanism in case a single file is too // large return new CloseableIterator.Wrapper<AvroWholeFile>( Iterators.singletonIterator( new AvroWholeFile(ByteBuffer.wrap(IOUtils.toByteArray(f)), f.getPath()))); } catch (final IOException e) { LOGGER.warn("Unable to read file", e); } return new CloseableIterator.Empty<>(); } }
/**
 * Supplies the "no data" features for this source.
 *
 * @return always an empty iterator — this implementation has no no-data features
 */
public CloseableIterator<SimpleFeature> getNoData() {
  final CloseableIterator<SimpleFeature> none = new CloseableIterator.Empty<SimpleFeature>();
  return none;
}
/**
 * Queries the metadata store for objects matching the given query.
 *
 * @param query the metadata query, including authorizations
 * @return an iterator of matching objects, or an empty iterator when the metadata table
 *         does not exist or its existence cannot be determined
 */
protected CloseableIterator<T> internalGetObjects(final MetadataQuery query) {
  boolean metadataExists;
  try {
    metadataExists = operations.metadataExists(getType());
  } catch (final IOException e1) {
    // treat an existence-check failure the same as "no metadata"
    LOGGER.error("Unable to check for existence of metadata to get objects", e1);
    metadataExists = false;
  }
  if (!metadataExists) {
    return new CloseableIterator.Empty<>();
  }
  final MetadataReader reader = operations.createMetadataReader(getType());
  final CloseableIterator<GeoWaveMetadata> metadata = reader.query(query);
  return new NativeIteratorWrapper(metadata, query.getAuthorizations());
}
/**
 * Reads an entire Twitter input file into memory as a single {@link AvroWholeFile}.
 *
 * @param input the file to read
 * @return a single-element iterator wrapping the file, or an empty iterator when the file
 *         cannot be read
 */
@Override
public CloseableIterator<AvroWholeFile> toAvroObjects(final URL input) {
  final byte[] contents;
  try {
    contents = IOUtils.toByteArray(input);
  } catch (final IOException e) {
    LOGGER.warn("Unable to read Twitter file: " + input.getPath(), e);
    return new CloseableIterator.Empty<>();
  }
  final AvroWholeFile avroFile = new AvroWholeFile();
  avroFile.setOriginalFilePath(input.getPath());
  avroFile.setOriginalFile(ByteBuffer.wrap(contents));
  return new CloseableIterator.Wrapper<>(Iterators.singletonIterator(avroFile));
}
/**
 * Reads an entire GDELT input file into memory as a single {@link AvroWholeFile}.
 *
 * @param input the file to read
 * @return a single-element iterator wrapping the file, or an empty iterator when the file
 *         cannot be read
 */
@Override
public CloseableIterator<AvroWholeFile> toAvroObjects(final URL input) {
  final byte[] contents;
  try {
    contents = IOUtils.toByteArray(input);
  } catch (final IOException e) {
    LOGGER.warn("Unable to read GDELT file: " + input.getPath(), e);
    return new CloseableIterator.Empty<>();
  }
  final AvroWholeFile avroFile = new AvroWholeFile();
  avroFile.setOriginalFilePath(input.getPath());
  avroFile.setOriginalFile(ByteBuffer.wrap(contents));
  return new CloseableIterator.Wrapper<>(Iterators.singletonIterator(avroFile));
}
/**
 * Reads an entire GeoLife input file into memory as a single {@link AvroWholeFile}.
 *
 * @param input the file to read
 * @return a single-element iterator wrapping the file, or an empty iterator when the file
 *         cannot be read
 */
@Override
public CloseableIterator<AvroWholeFile> toAvroObjects(final URL input) {
  final byte[] contents;
  try {
    contents = IOUtils.toByteArray(input);
  } catch (final IOException e) {
    LOGGER.warn("Unable to read GeoLife file: " + input.getPath(), e);
    return new CloseableIterator.Empty<>();
  }
  final AvroWholeFile avroFile = new AvroWholeFile();
  avroFile.setOriginalFilePath(input.getPath());
  avroFile.setOriginalFile(ByteBuffer.wrap(contents));
  return new CloseableIterator.Wrapper<>(Iterators.singletonIterator(avroFile));
}