/**
 * Builds a lazy reference to an {@link InputSource} for the given resource.
 * The resource is not opened until the supplier is invoked; each invocation
 * opens a fresh stream via {@code resource.read()}.
 *
 * @param resource the resource to read from when the supplier is invoked
 * @return a supplier that opens the resource and wraps it in an {@link InputSource}
 */
private static Supplier<InputSource> createInputSourceRef(final Resource resource) {
    return new Supplier<InputSource>() {
        @Override
        public InputSource get() {
            final InputStream stream = resource.read();
            return new InputSource(stream);
        }
    };
}
/**
 * Creates a {@link CsvDatastore} backed by the given resource, using the
 * resource's own name as filename and the library defaults for quote,
 * separator and escape characters, encoding, and header-line index.
 * The boolean {@code true} argument enables the corresponding default
 * behavior of the delegated constructor.
 *
 * @param name the datastore name
 * @param resource the CSV resource to read
 */
public CsvDatastore(String name, Resource resource) {
    this(name, resource, resource.getName(), CsvConfiguration.DEFAULT_QUOTE_CHAR,
            CsvConfiguration.DEFAULT_SEPARATOR_CHAR, CsvConfiguration.DEFAULT_ESCAPE_CHAR,
            FileHelper.DEFAULT_ENCODING, true, CsvConfiguration.DEFAULT_COLUMN_NAME_LINE);
}
/**
 * Loads the capture-state properties from {@code captureStateFile}.
 * When the file does not exist, an empty {@link Properties} instance is
 * returned and an informational message is logged.
 *
 * @return the loaded properties (never {@code null}, possibly empty)
 * @throws IOException declared for caller compatibility
 */
private Properties loadProperties() throws IOException {
    final Properties properties = new Properties();
    if (captureStateFile.isExists()) {
        // Stream the file contents straight into the Properties instance.
        captureStateFile.read((Action<InputStream>) properties::load);
    } else {
        logger.info("Capture state file does not exist: {}", captureStateFile);
    }
    return properties;
}
// NOTE(review): fragment of a larger method — the enclosing definition is not
// visible in this view. Captures the resource's display name and its
// fully-qualified path into locals for use further down.
final String name = resource.getName();
final String qualifiedPath = resource.getQualifiedPath();
/**
 * Creates a {@link DocumentSource} that streams JSON documents from
 * {@code _resource}.
 *
 * <p>Stream ownership: on success the returned {@link JsonDocumentSource}
 * wraps the parser (and thereby the stream), so the stream is intentionally
 * NOT closed here. It is closed only on the failure path.
 *
 * @return a document source reading from the JSON resource
 * @throws MetaModelException if the JSON parser cannot be created
 */
private DocumentSource createDocumentSource() {
    final InputStream inputStream = _resource.read();
    try {
        final MappingJsonFactory jsonFactory = new MappingJsonFactory();
        final JsonParser parser = jsonFactory.createParser(inputStream);
        logger.debug("Created JSON parser for resource: {}", _resource);
        return new JsonDocumentSource(parser, _resource.getName());
    } catch (Exception e) {
        // Close the stream only on failure — on success the parser owns it.
        FileHelper.safeClose(inputStream);
        throw new MetaModelException("Unexpected error while creating JSON parser", e);
    }
}
/**
 * Wraps an existing resource as a Hadoop resource, keeping its qualified
 * path and attaching the Hadoop configuration plus a cluster reference name.
 *
 * @param resource the resource whose qualified path identifies this one
 * @param configuration the Hadoop configuration to associate
 * @param clusterReferenceName logical name of the referenced cluster
 */
public HadoopResource(final Resource resource, final Configuration configuration, final String clusterReferenceName) {
    super(resource.getQualifiedPath());
    _clusterReferenceName = clusterReferenceName;
    _configuration = configuration;
}
/**
 * Background check for the resource: yields the resource itself when it
 * exists, otherwise {@code null}.
 *
 * @return the resource if it exists, else {@code null}
 * @throws Exception declared by the overridden contract
 */
@Override
protected Resource doInBackground() throws Exception {
    return resource.isExists() ? resource : null;
}
/**
 * Writes the {@link Workbook} to a {@link Resource}. The {@link Workbook} will be closed as a result of this
 * operation!
 *
 * @param dataContext the Excel data context whose underlying resource is the write target
 * @param wb the workbook to persist; it is closed by this method
 */
public static void writeAndCloseWorkbook(ExcelDataContext dataContext, final Workbook wb) {
    // first write to a temp file to avoid that workbook source is the same
    // as the target (will cause read+write cyclic overflow)
    final Resource realResource = dataContext.getResource();
    // In-memory buffer keyed by the same qualified path as the real target.
    final Resource tempResource = new InMemoryResource(realResource.getQualifiedPath());
    tempResource.write(out -> wb.write(out));
    FileHelper.safeClose(wb);
    // Only after the workbook is fully serialized, overwrite the real target.
    FileHelper.copy(tempResource, realResource);
}
/**
 * Opens the resource for writing and wraps the stream in a character
 * writer using the configured (safe) encoding.
 *
 * @param resource the resource to write to
 * @return a writer over the resource's output stream
 */
@Override
protected Writer createWriter(final Resource resource) {
    return FileHelper.getWriter(resource.write(), getSafeEncoding());
}
};
/**
 * Lazily obtains the writer for {@code _resource}. A cached writer is
 * reused only in append mode; in overwrite mode a fresh writer is always
 * opened and cached.
 *
 * @param append whether to append to the resource instead of overwriting it
 * @return the (possibly cached) writer
 */
private Writer getWriter(boolean append) {
    if (_writer == null || !append) {
        // NOTE(review): when _writer is non-null and append == false, the old
        // writer is replaced without being closed here — confirm the caller
        // closes the previous writer, otherwise this leaks a stream.
        final boolean needsLineBreak = needsLineBreak(_resource, _configuration);
        final OutputStream out;
        if (append) {
            out = _resource.append();
        } else {
            out = _resource.write();
        }
        // Emit a BOM only when starting a fresh file, never when appending.
        final boolean insertBom = !append;
        final Writer writer = FileHelper.getWriter(out, _configuration.getEncoding(), insertBom);
        if (needsLineBreak) {
            try {
                writer.write('\n');
            } catch (IOException e) {
                // Best-effort separator: failure to add the newline is not fatal.
                logger.debug("Failed to insert newline", e);
            }
        }
        _writer = writer;
    }
    return _writer;
}
/**
 * Builds a {@link DocumentSource} streaming JSON documents from
 * {@code _resource}. On success the parser takes ownership of the stream;
 * the stream is closed here only if parser construction fails.
 *
 * @return a document source over the JSON resource
 * @throws MetaModelException if the parser cannot be created
 */
private DocumentSource createDocumentSource() {
    final InputStream stream = _resource.read();
    try {
        final JsonParser jsonParser = new MappingJsonFactory().createParser(stream);
        logger.debug("Created JSON parser for resource: {}", _resource);
        return new JsonDocumentSource(jsonParser, _resource.getName());
    } catch (Exception exc) {
        FileHelper.safeClose(stream);
        throw new MetaModelException("Unexpected error while creating JSON parser", exc);
    }
}
/**
 * Derives the scheme-relative path for the resource: the qualified path
 * with a leading {@code "<scheme>://"} prefix removed, if present.
 *
 * @param resource the resource whose path to convert
 * @return the qualified path without the scheme prefix
 */
@Override
public String createPath(final Resource resource) {
    final String schemePrefix = getScheme() + "://";
    final String qualifiedPath = resource.getQualifiedPath();
    return qualifiedPath.startsWith(schemePrefix)
            ? qualifiedPath.substring(schemePrefix.length())
            : qualifiedPath;
}
// NOTE(review): fragment of a larger method — the enclosing definition is not
// visible in this view. Captures the resource's display name and its
// fully-qualified path into locals for use further down.
final String name = resource.getName();
final String qualifiedPath = resource.getQualifiedPath();
/**
 * Validation hook: rejects the configuration when the target file already
 * exists and overwriting has not been enabled.
 *
 * @throws IllegalStateException if the file exists and overwriting is disabled
 */
@Validate
public void validate() {
    if (overwriteFileIfExists) {
        // Overwriting is allowed — nothing to check.
        return;
    }
    if (file.isExists()) {
        throw new IllegalStateException(
                "The file already exists. Please configure the job to overwrite the existing file.");
    }
}