@SuppressWarnings("FilesLinesLeak") // the user needs to close it in this case @Override public Stream<String> lines() throws IOException { return Files.lines(path, charset); } };
@SuppressWarnings("FilesLinesLeak") // the user needs to close it in this case @Override public Stream<String> lines() throws IOException { return Files.lines(path, charset); } };
@SuppressWarnings("FilesLinesLeak") // the user needs to close it in this case @Override public Stream<String> lines() throws IOException { return Files.lines(path, charset); } };
@SuppressWarnings("FilesLinesLeak") // the user needs to close it in this case @Override public Stream<String> lines() throws IOException { return Files.lines(path, charset); } };
/**
 * Reads every line of the supplied file and hands each one to {@code lineProcessor}.
 *
 * <p>Uses {@link Files#readAllLines} so the underlying file handle is opened and
 * closed entirely inside this call; the previous {@code Files.lines(path)} stream
 * was never closed and leaked the handle (contradicting this method's own contract
 * that the source is closed on return).
 *
 * @param path path to the file with the contents to be read; may not be null
 * @param lineProcessor the function that this method calls for each line read from
 *        the supplied file; may not be null
 * @throws IOException if an I/O error occurs
 */
public static void readLines(Path path, Consumer<String> lineProcessor) throws IOException {
    Files.readAllLines(path).forEach(lineProcessor);
}
/**
 * Opens the file at {@code training/<path>} as a lazy stream of its lines.
 *
 * <p>NOTE(review): the returned stream holds an open file handle -- the CALLER
 * must close it (e.g. with try-with-resources), otherwise the handle leaks.
 * The {@code "training"} base directory is hard-coded and resolved against the
 * current working directory.
 *
 * @param path file path relative to the {@code training} directory
 * @return an open stream of the file's lines; caller is responsible for closing it
 * @throws RuntimeException wrapping any {@link IOException} raised while opening the file
 */
public static Stream<String> filePathToStream(String path) {
    try {
        return Files.lines(Paths.get("training", path));
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Reads all lines of the given file into a list.
 *
 * @param filePath path of the file to read
 * @return the file's lines, or {@code null} if an I/O error occurred (the error is logged)
 */
public static List<String> readFileAsLines(String filePath) {
    List<String> lines = null;
    try {
        // Files.readAllLines opens and closes the file itself -- the previous
        // Files.lines(...).collect(...) pipeline never closed the stream and
        // leaked a file handle
        lines = Files.readAllLines(Paths.get(filePath));
    } catch (IOException e) {
        // fill the placeholder with the path and pass the exception as the final
        // argument so SLF4J records the full stack trace (previously the exception
        // was consumed by the placeholder and the trace was lost)
        logger.error("IOException when read data from file : {}", filePath, e);
    }
    return lines;
}
// Logs the contents of queue/scheduler for every directory found directly under
// SYS_BLOCK (presumably /sys/block -- confirm against the constant's definition),
// i.e. the active I/O scheduler of each block device.
@Override
void dump( Logger logger )
{
    File[] files = SYS_BLOCK.listFiles( File::isDirectory );
    if ( files != null )
    {
        for ( File subdir : files )
        {
            File scheduler = new File( subdir, "queue/scheduler" );
            if ( scheduler.isFile() )
            {
                // try-with-resources closes the lines stream (and its file handle)
                try ( Stream<String> lines = Files.lines( scheduler.toPath() ) )
                {
                    lines.forEach( logger::log );
                }
                catch ( IOException e )
                {
                    // ignore -- this dump is best-effort diagnostics; an unreadable
                    // scheduler file must not abort dumping the remaining devices
                }
            }
        }
    }
}
},
/**
 * Extracts the class name from the first {@code .class} directive in the given file.
 *
 * @param inputFile file expected to contain a line starting with {@code ".class "}
 * @return the class name captured by {@code CLASS_PATTERN} from that line
 * @throws RuntimeException if no {@code .class} directive exists, the directive does
 *         not match {@code CLASS_PATTERN}, or the file cannot be read
 */
private static String getClassName(File inputFile) throws FileNotFoundException {
    try {
        Path myPath = Paths.get(inputFile.toURI());
        Optional<String> firstClassLine;
        // try-with-resources guarantees the file handle is released even when the
        // pipeline throws; the previous code called lines.close() only on the
        // happy path and leaked the handle otherwise
        try (Stream<String> lines = Files.lines(myPath)) {
            firstClassLine = lines.filter(s -> s.startsWith(".class ")).findFirst();
        }
        if (!firstClassLine.isPresent()) {
            throw new RuntimeException("Missing class directive in " + inputFile);
        }
        String line = firstClassLine.get();
        Matcher m = CLASS_PATTERN.matcher(line);
        if (!m.find()) {
            throw new RuntimeException("Strange class directive: " + line);
        }
        return m.group(1);
    } catch (IOException e) {
        throw new RuntimeException("Unable to read class name in " + inputFile, e);
    }
}
/**
 * Applies a lambda (or equivalent {@link Consumer}) to each line and increments the totalLinesConsumed() until
 * EOF. Multiple invocations to this method will continue from where it left off if new content was appended after
 * the last read.
 *
 * @param action the lambda to apply to each line
 * @return the number of lines streamed by this invocation
 * @throws IOException if the file does not exist or is otherwise not readable
 */
public long stream(Consumer<String> action) throws IOException {
    long linesStreamed = 0L;
    // Lazily open the file once and keep the stream/iterator alive across
    // invocations so a later call resumes where the previous one stopped;
    // skip(start) fast-forwards past lines consumed before this object existed.
    // NOTE(review): the stream is intentionally NOT closed here -- closing it
    // would make resumption impossible; presumably the owner of this object is
    // responsible for its lifetime -- confirm against the enclosing class.
    if (null == stream)
        stream = Files.lines(path, new SystemEnvironment().consoleLogCharsetAsCharset()).skip(start);
    if (null == iterator)
        iterator = stream.iterator();
    while (iterator.hasNext()) {
        action.accept((String) iterator.next());
        ++linesStreamed;
        ++count;  // cumulative total across invocations (see totalLinesConsumed() in the Javadoc)
    }
    return linesStreamed;
}
/**
 * Counts how many lines of {@code file} contain {@code substring}.
 *
 * @param file file to scan
 * @param substring text to look for on each line
 * @return the number of lines containing the substring
 * @throws IOException if the file cannot be read
 */
private static long countOccurrences( File file, String substring ) throws IOException
{
    // map each line to 1 when it matches and 0 otherwise, then sum; the
    // try-with-resources closes the underlying file handle
    try ( Stream<String> lines = Files.lines( file.toPath() ) )
    {
        return lines.map( line -> line.contains( substring ) ? 1L : 0L )
                    .reduce( 0L, Long::sum );
    }
}
}
// Scans the given input file and records the 1-based numbers of all lines that
// contain a "//NOSONAR" marker, then reports them to the noSonarFilter so those
// lines are excluded from issue reporting.
private void processFile(InputFile inputFile) {
    try {
        Set<Integer> noSonarLines = new HashSet<>();
        // single-element array acts as a mutable line counter usable from the lambda
        int[] lineCounter = {1};
        // try-with-resources closes the lines stream (and the file handle) on exit
        try (Stream<String> stream = Files.lines(inputFile.path(), inputFile.charset())) {
            // forEachOrdered keeps line numbers aligned with file order
            stream.forEachOrdered(lineStr -> {
                if (lineStr.contains("//NOSONAR")) {
                    noSonarLines.add(lineCounter[0]);
                }
                lineCounter[0]++;
            });
        }
        noSonarFilter.noSonarInFile(inputFile, noSonarLines);
    } catch (IOException e) {
        throw new IllegalStateException("Fail to process " + inputFile, e);
    }
}
}
/**
 * Loads the entire contents of the given file as a single UTF-8 string, with lines
 * joined by {@code "\n"} (no trailing newline).
 *
 * @param filePath path of the file to load; may not be null
 * @return the file contents with lines joined by {@code "\n"}
 * @throws RuntimeException wrapping any {@link IOException} raised while reading
 */
public static String loadContent(final Path filePath) {
    try {
        // readAllLines opens and closes the file itself; the previous
        // Files.lines(...).collect(...) pipeline never closed the stream and
        // leaked a file handle
        return String.join("\n", Files.readAllLines(filePath, StandardCharsets.UTF_8));
    } catch (final IOException ioe) {
        throw new RuntimeException("failed to find test test-schema " + filePath, ioe);
    }
}
/**
 * Returns {@code true} when every one of {@code expectedStrings} occurs as a
 * substring of at least one line in {@code file}.
 *
 * @param file file to scan
 * @param expectedStrings substrings that must all be found somewhere in the file
 * @return {@code true} when all expected strings were seen
 * @throws IOException if the file cannot be read
 */
private static boolean fileContains( File file, String... expectedStrings ) throws IOException
{
    Set<String> remaining = asSet( expectedStrings );
    // drop an expected string as soon as some line contains it; whatever is
    // still in the set at EOF was never seen
    try ( Stream<String> content = Files.lines( file.toPath() ) )
    {
        content.forEach( line -> remaining.removeIf( line::contains ) );
    }
    return remaining.isEmpty();
}
@Test
public void shouldGiveFalseWhenPatternDoesNotExistInText() throws Exception {
    // read the fixture eagerly -- readAllLines closes the file itself, whereas
    // the previous Files.lines(...).collect(joining()) never closed the stream
    // and leaked the file handle; joining() with no delimiter == plain concat
    String text = String.join("", Files.readAllLines(Paths.get("src", "test", "resources", "book.txt")));
    boolean patternExists = RandomProblem002.patternExistInText(text, "awesome");
    assertFalse(patternExists);
}
}
@Test
public void shouldGiveTrueWhenPatternExistInText() throws Exception {
    // read the fixture eagerly -- readAllLines closes the file itself, whereas
    // the previous Files.lines(...).collect(joining()) never closed the stream
    // and leaked the file handle; joining() with no delimiter == plain concat
    String text = String.join("", Files.readAllLines(Paths.get("src", "test", "resources", "book.txt")));
    boolean patternExists = RandomProblem002.patternExistInText(text, "prudent");
    assertTrue(patternExists);
}
@Setup
public void setUp() throws IOException {
    queryRunner = new LocalQueryRunner(testSessionBuilder()
            .setCatalog("memory")
            .setSchema("default")
            .build());
    queryRunner.installPlugin(new GeoPlugin());
    queryRunner.createCatalog("memory", new MemoryConnectorFactory(), ImmutableMap.of());

    Path path = Paths.get(BenchmarkSpatialJoin.class.getClassLoader().getResource("us-states.tsv").getPath());
    // readAllLines reads and closes the file eagerly; the previous
    // Files.lines(path) stream was never closed and leaked a file handle
    String polygonValues = Files.readAllLines(path).stream()
            .map(line -> line.split("\t"))
            .map(parts -> format("('%s', '%s')", parts[0], parts[1]))
            .collect(Collectors.joining(","));
    queryRunner.execute(format("CREATE TABLE memory.default.polygons AS SELECT * FROM (VALUES %s) as t (name, wkt)", polygonValues));
}
@Setup
public void setUp() throws IOException {
    queryRunner = new LocalQueryRunner(testSessionBuilder()
            .setCatalog("memory")
            .setSchema("default")
            .build());
    queryRunner.installPlugin(new GeoPlugin());
    queryRunner.createCatalog("memory", new MemoryConnectorFactory(), ImmutableMap.of());

    Path path = Paths.get(BenchmarkGeometryAggregations.class.getClassLoader().getResource("us-states.tsv").getPath());
    // readAllLines reads and closes the file eagerly; the previous
    // Files.lines(path) stream was never closed and leaked a file handle
    String polygonValues = Files.readAllLines(path).stream()
            .map(line -> line.split("\t"))
            .map(parts -> format("('%s', '%s')", parts[0], parts[1]))
            .collect(Collectors.joining(","));
    queryRunner.execute(
            format("CREATE TABLE memory.default.us_states AS SELECT ST_GeometryFromText(t.wkt) AS geom FROM (VALUES %s) as t (name, wkt)", polygonValues));
}
// A failing test must still leave profiler output behind: run a test that fails,
// then verify the per-test directory contains a profiler-output.txt mentioning
// the profiled method.
@Test
void failingTestsMustProduceProfilerOutput() throws IOException
{
    CONTEXT.clear();
    execute( "testThatFails" );

    File testDirectory = CONTEXT.getValue( TEST_DIR );
    assertTrue( testDirectory.exists() );
    assertTrue( testDirectory.isDirectory() );

    File profilerOutput = new File( testDirectory, "profiler-output.txt" );
    assertTrue( profilerOutput.exists() );
    assertTrue( profilerOutput.isFile() );

    // try-with-resources closes the lines stream (and the file handle)
    try ( Stream<String> output = Files.lines( profilerOutput.toPath() ) )
    {
        assertTrue( output.anyMatch( line -> line.contains( "someVeryExpensiveComputation" ) ) );
    }
}
/**
 * Validates the feature output in every {@code part-*} file under {@code path}:
 * each JSON line must be an update whose second element decodes to a float array
 * of length {@code FEATURES}. Collects the IDs (first element) that were seen.
 *
 * @param path directory containing the {@code part-*} output files
 * @param previousIDs IDs that must all reappear in this output
 * @return the set of IDs seen in this output
 * @throws IOException if a file cannot be read, uncompressed, or deleted
 */
private static Collection<String> checkFeatures(Path path, Collection<String> previousIDs) throws IOException {
    Collection<String> seenIDs = new HashSet<>();
    for (Path file : IOUtils.listFiles(path, "part-*")) {
        Path uncompressedFile = copyAndUncompress(file);
        // readAllLines opens and closes the file before the delete below; the
        // previous unclosed Files.lines(...) stream leaked a handle and can make
        // the delete fail on platforms that lock open files (e.g. Windows)
        Files.readAllLines(uncompressedFile).forEach(line -> {
            List<?> update = TextUtils.readJSON(line, List.class);
            seenIDs.add(update.get(0).toString());
            assertEquals(FEATURES, TextUtils.convertViaJSON(update.get(1), float[].class).length);
        });
        Files.delete(uncompressedFile);
    }
    assertNotEquals(0, seenIDs.size());
    assertTrue(seenIDs.containsAll(previousIDs));
    return seenIDs;
}