Refine search
/**
 * Creates an iterable over all {@link org.apache.lucene.document.Document document}s in all partitions.
 * <p>
 * A searcher is acquired for every partition up front; if any acquisition fails, the
 * searchers acquired so far are closed silently before the failure is rethrown unchecked.
 *
 * @return LuceneAllDocumentsReader over all documents
 */
public LuceneAllDocumentsReader allDocumentsReader()
{
    ensureOpen();
    List<PartitionSearcher> acquiredSearchers = new ArrayList<>( partitions.size() );
    try
    {
        for ( AbstractIndexPartition partition : partitions )
        {
            acquiredSearchers.add( partition.acquireSearcher() );
        }
        List<LucenePartitionAllDocumentsReader> partitionReaders = new ArrayList<>( acquiredSearchers.size() );
        for ( PartitionSearcher searcher : acquiredSearchers )
        {
            partitionReaders.add( new LucenePartitionAllDocumentsReader( searcher ) );
        }
        return new LuceneAllDocumentsReader( partitionReaders );
    }
    catch ( IOException e )
    {
        // don't leak the searchers we already acquired
        IOUtils.closeAllSilently( acquiredSearchers );
        throw new UncheckedIOException( e );
    }
}
/**
 * Refresh all partitions to make newly inserted data visible for readers.
 * <p>
 * Partitions are refreshed concurrently via a parallel stream; a failing partition
 * refresh surfaces as an {@link UncheckedIOException}, which is unwrapped here so the
 * caller sees the original checked {@link IOException}.
 *
 * @throws IOException if refreshing any partition fails
 */
public void maybeRefreshBlocking() throws IOException
{
    try
    {
        getPartitions().parallelStream().forEach( this::maybeRefreshPartition );
    }
    catch ( UncheckedIOException e )
    {
        // rethrow the original checked cause wrapped by maybeRefreshPartition
        throw e.getCause();
    }
}
try { logFiles = Arrays.stream(topoDir.listFiles()) .flatMap(Unchecked.function(portDir -> directoryCleaner.getFilesForDir(portDir).stream())) .filter(File::isFile) .collect(toCollection(TreeSet::new)); } catch (UncheckedIOException e) { throw e.getCause(); .map(WorkerLogs::getTopologyPortWorkerLog) .filter(fileStr -> !StringUtils.equals(fileName, fileStr)) .collect(toList()); reorderedFilesStr.add(fileName); if (StringUtils.isNotEmpty(grep)) { String matchedString = String.join("\n", Arrays.stream(logString.split("\n")) .filter(str -> str.contains(grep)).collect(toList())); bodyContents.add(pre(matchedString).withId("logContent")); } else {
private static List<String> getImportOrder(File importsFile) { try (Stream<String> lines = Files.lines(importsFile.toPath())) { return lines.filter(line -> !line.startsWith("#")) // parse 0=input .map(ImportOrderStep::splitIntoIndexAndName) .sorted(Map.Entry.comparingByKey()) .map(Map.Entry::getValue) .collect(Collectors.toCollection(ArrayList::new)); } catch (IOException e) { throw new UncheckedIOException(e); } }
public static byte[] encodeOid(String oid) { requireNonNull(oid, "oid is null"); List<Integer> parts = Splitter.on('.').splitToList(oid).stream() .map(Integer::parseInt) .collect(toImmutableList()); checkArgument(parts.size() >= 2, "at least 2 parts are required"); try { ByteArrayOutputStream body = new ByteArrayOutputStream(); body.write(parts.get(0) * 40 + parts.get(1)); for (Integer part : parts.subList(2, parts.size())) { writeOidPart(body, part); } byte[] length = encodeLength(body.size()); ByteArrayOutputStream out = new ByteArrayOutputStream(); out.write(OBJECT_IDENTIFIER_TAG); out.write(length); body.writeTo(out); return out.toByteArray(); } catch (IOException e) { // this won't happen with byte array output streams throw new UncheckedIOException(e); } }
/**
 * Verifies that none of the given paths remain on disk, tolerating one benign leftover:
 * a directory whose only child is an empty state directory. Returns {@code true} when no
 * offending paths remain; also asserts it, listing the offenders in the assertion message.
 */
private static boolean assertPathsDoNotExist(final Path[] paths) {
    Set<Path> existingPaths = Stream.of(paths)
        .filter(FileSystemUtils::exists)
        .filter(leftOver -> {
            // Relaxed assertion for the special case where only the empty state directory exists after deleting
            // the shard directory because it was created again as a result of a metadata read action concurrently.
            try (DirectoryStream<Path> children = Files.newDirectoryStream(leftOver)) {
                Iterator<Path> iter = children.iterator();
                // completely empty leftover directory: still counts as an offender
                if (iter.hasNext() == false) {
                    return true;
                }
                Path maybeState = iter.next();
                // more than one child, or the single child is not the state dir: offender
                if (iter.hasNext() || maybeState.equals(leftOver.resolve(MetaDataStateFormat.STATE_DIR_NAME)) == false) {
                    return true;
                }
                // single state-dir child: offender only if the state dir is non-empty
                try (DirectoryStream<Path> stateChildren = Files.newDirectoryStream(maybeState)) {
                    return stateChildren.iterator().hasNext();
                }
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        }).collect(Collectors.toSet());
    assert existingPaths.size() == 0 : "Paths exist that should have been deleted: " + existingPaths;
    return existingPaths.size() == 0;
}
/**
 * Combines consecutive pairs of partial results into complete results: entries
 * (0,1), (2,3), … of {@code results} are merged pairwise. The input size must be even.
 */
protected final List<Result> convertFromBatchResult(List<Result> results) {
    assertTrue(results.size() % 2 == 0);
    List<Result> complete = new ArrayList<>(results.size() / 2);
    for (int pair = 0; pair < results.size() / 2; pair++) {
        Result first = results.get(2 * pair);
        Result second = results.get(2 * pair + 1);
        try {
            complete.add(Result.createCompleteResult(Arrays.asList(first, second)));
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }
    return complete;
}
/**
 * Invokes {@code action} once per readable cache file, passing the file name and the
 * file's raw bytes. Does nothing when the cache directory does not exist; a failed
 * read surfaces as {@link UncheckedIOException}.
 */
@Override
public final void forEach(final BiConsumer<? super String, ? super byte[]> action) {
    if (!Files.exists(cacheDirectory)) {
        return;
    }
    try (final Stream<Path> readableFiles = cacheContent().filter(Files::isReadable)) {
        readableFiles.forEach(cached -> {
            final byte[] bytes;
            try {
                bytes = Files.readAllBytes(cached);
            } catch (final IOException e) {
                throw new UncheckedIOException(e);
            }
            action.accept(cached.getFileName().toString(), bytes);
        });
    }
}
/**
 * Resolves the host names holding the given block location, wrapping the checked
 * Hadoop I/O failure from {@code getHosts()} as {@link UncheckedIOException}.
 */
private static List<HostAddress> getHostAddresses(BlockLocation blockLocation)
{
    try {
        // only getHosts() can throw IOException; the stream pipeline cannot
        return Arrays.stream(blockLocation.getHosts())
                .map(HostAddress::fromString)
                .collect(toImmutableList());
    }
    catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
outputDir = file.getParent(); if (outputDir == null) { outputDir = file.resolveSibling(fileName); bundle.entries() .stream() .filter(entry -> filename == null || entry.name().equals(filename)) .forEach(uncheck(entry -> { progress.update(Optional.of(entry.name()), done.getAndIncrement() / (double) total); Path entryFile = outputDir.resolve(entry.name()); Files.createDirectories(entryFile.getParent()); Files.copy(entry.inputStream(), entryFile, REPLACE_EXISTING); throw ex.getCause();
/**
 * Creates a fresh temporary directory inside the file manager's in-memory file system
 * and installs it as the target path for every output location.
 */
private static void createAndInstallTempFolderForOutput(
    ErrorProneInMemoryFileManager fileManager) {
  Path fileSystemRoot = fileManager.fileSystem().getRootDirectories().iterator().next();
  Path tempDirectory;
  try {
    tempDirectory = Files.createTempDirectory(fileSystemRoot, "");
  } catch (IOException e) {
    throw new UncheckedIOException(e);
  }
  for (StandardLocation location : StandardLocation.values()) {
    if (!location.isOutputLocation()) {
      continue;
    }
    try {
      fileManager.setLocationFromPaths(location, ImmutableList.of(tempDirectory));
    } catch (IOException e) {
      throw new UncheckedIOException(e);
    }
  }
}
wantedtemplate = Stream.of(templatePath) .map( i -> { try { return i.openStream(); } catch (IOException e) { throw new UncheckedIOException(e); .map( i -> new InputStreamReader(i, CharsetUtil.UTF_8)) .map( i -> { try { return localtemplate; } catch (IOException e) { throw new UncheckedIOException(e); .findFirst().orElseGet(() -> null); } catch (UncheckedIOException e) { logger.error("Can't load template definition: {}", e.getMessage()); logger.catching(Level.DEBUG, e); return false; return opt.map(i-> i != wantedVersion).orElseGet(() -> true); } catch (JsonProcessingException e) { throw new UncheckedIOException(e);
/**
 * Creates a writer that streams benchmark samples as a UTF-8 JSON document to
 * {@code outputStream}. The constructor immediately writes the opening object and
 * starts the {@code "samples"} array.
 *
 * @param outputStream destination for the JSON output; must not be null
 * @throws UncheckedIOException if creating the generator or writing the preamble fails
 */
public JsonBenchmarkResultWriter(OutputStream outputStream)
{
    requireNonNull(outputStream, "outputStream is null");
    try {
        jsonGenerator = new JsonFactory().createGenerator(outputStream, JsonEncoding.UTF8);
        jsonGenerator.writeStartObject();
        jsonGenerator.writeArrayFieldStart("samples");
    }
    catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
/**
 * Adds the given index entry updates by converting each to a Lucene document and
 * handing the whole batch to the writer.
 *
 * @param updates the updates to apply; asserted to belong to this index
 * @throws UncheckedIOException if the writer fails with an {@link IOException}
 */
@Override
public void add( Collection<? extends IndexEntryUpdate<?>> updates )
{
    assert updatesForCorrectIndex( updates );
    try
    {
        // Lucene documents stored in a ThreadLocal and reused so we can't create an eager collection of documents here
        // That is why we create a lazy Iterator and then Iterable
        writer.addDocuments( updates.size(), () -> updates.stream()
                .map( LuceneIndexPopulator::updateAsDocument )
                .iterator() );
    }
    catch ( IOException e )
    {
        throw new UncheckedIOException( e );
    }
}
/**
 * Regenerates the expected query-plan resource files: each query resource is planned
 * (in parallel) and the resulting plan is written under {@code src/test/resources} at
 * the path derived from the query resource path.
 * <p>
 * The plan-test environment is initialized first and torn down in {@code finally},
 * even when generation fails.
 */
public void generate()
        throws Exception
{
    initPlanTest();
    try {
        getQueryResourcePaths()
                .parallel()
                .forEach(queryResourcePath -> {
                    try {
                        Path queryPlanWritePath = Paths.get(
                                getSourcePath().toString(),
                                "src/test/resources",
                                getQueryPlanResourcePath(queryResourcePath));
                        createParentDirs(queryPlanWritePath.toFile());
                        write(generateQueryPlan(read(queryResourcePath)).getBytes(UTF_8), queryPlanWritePath.toFile());
                        System.out.println("Generated expected plan for query: " + queryResourcePath);
                    }
                    catch (IOException e) {
                        // checked IOException can't cross the lambda boundary; unwrap if needed by callers
                        throw new UncheckedIOException(e);
                    }
                });
    }
    finally {
        destroyPlanTest();
    }
}
@SuppressWarnings("StreamResourceLeak") private final Stream<Path> cacheContent() { try { return Files.walk(cacheDirectory).filter(Files::isRegularFile); } catch (final IOException e) { throw new UncheckedIOException(e); } }
@Inject public FileSessionPropertyManager(FileSessionPropertyManagerConfig config) requireNonNull(config, "config is null"); sessionMatchSpecs = CODEC.fromJson(Files.readAllBytes(configurationFile)); throw new UncheckedIOException(e);