/**
 * Creates a path string for the given resource, normalizing Windows-style
 * backslash separators to forward slashes.
 *
 * @param resource the resource to resolve; must be a {@link FileResource}
 * @return the underlying file's path with '/' as the separator
 */
@Override
public String createPath(Resource resource) {
    final String path = ((FileResource) resource).getFile().getPath();
    // replace(char, char) does a literal swap; the original replaceAll("\\\\", "/")
    // needlessly compiled and ran a regex for the same single-character substitution.
    return path.replace('\\', '/');
}
/**
 * Builds an {@link ExcelDataContextWriter} targeting the file behind the
 * given resource and the configured sheet name.
 *
 * @param resource the target resource; expected to be a {@link FileResource}
 * @return a writer for the resource's underlying file
 */
@Override
protected ExcelDataContextWriter createWriter(final Resource resource) {
    assert resource instanceof FileResource;
    return new ExcelDataContextWriter(((FileResource) resource).getFile(), sheetName);
}
/**
 * Infers the datastore type for a resource. A local file resource that points
 * at a directory is treated as a SAS datastore; every other case is delegated
 * to {@link FileDatastoreEnum#getDatastoreTypeFromResource(Resource)}.
 *
 * @param resource the resource to inspect
 * @return the inferred datastore type
 */
public static FileDatastoreEnum inferDatastoreTypeFromResource(final Resource resource) {
    // Only local file resources can be checked for directory-ness directly.
    if (resource instanceof FileResource
            && ((FileResource) resource).getFile().isDirectory()) {
        return FileDatastoreEnum.SAS;
    }
    return FileDatastoreEnum.getDatastoreTypeFromResource(resource);
}
/**
 * Points this component at the file wrapped by the given resource.
 *
 * @param resource the file resource whose underlying file should be used
 */
@Override
public void setResource(final FileResource resource) {
    final File underlyingFile = resource.getFile();
    setFile(underlyingFile);
}
/**
 * Converts the resource's underlying file into a path string, resolved
 * relative to the configured home folder where possible.
 *
 * @param resource the resource to resolve; must be a {@link FileResource}
 * @return the resolved path
 */
@Override
public String createPath(final Resource resource) {
    final FileResolver fileResolver = new FileResolver(_homeFolder);
    return fileResolver.toPath(((FileResource) resource).getFile());
}
/**
 * Creates a filename string to externalize, based on a given {@link Resource}.
 *
 * @param resource the resource to derive a filename from
 * @return the filename/path to externalize
 * @throws UnsupportedOperationException if the resource type is not supported
 */
protected String toFilename(final Resource resource) throws UnsupportedOperationException {
    if (resource instanceof FileResource) {
        final FileResource fileResource = (FileResource) resource;
        return fileResource.getFile().getPath();
    }
    // HadoopResource extends the plain path with templating, so check it first.
    if (resource instanceof HadoopResource) {
        final HadoopResource hadoopResource = (HadoopResource) resource;
        return hadoopResource.getTemplatedPath();
    }
    if (resource instanceof HdfsResource) {
        return resource.getQualifiedPath();
    }
    throw new UnsupportedOperationException("Unsupported resource type: " + resource);
}
public static OutputWriter getWriter(final Resource resource, final String[] headers, final String encoding, final char separatorChar, final char quoteChar, final char escapeChar, final boolean includeHeader, final InputColumn<?>... columns) { final CsvConfiguration csvConfiguration = getConfiguration(encoding, separatorChar, quoteChar, escapeChar, includeHeader); CsvOutputWriter outputWriter; final String qualifiedPath = resource.getQualifiedPath(); synchronized (outputWritersPerPath) { outputWriter = outputWritersPerPath.get(qualifiedPath); if (outputWriter == null) { if (resource instanceof FileResource) { final File file = ((FileResource) resource).getFile(); final File parentFile = file.getParentFile(); if (parentFile != null && !parentFile.exists()) { parentFile.mkdirs(); } } outputWritersPerPath.put(qualifiedPath, outputWriter); counters.put(qualifiedPath, new AtomicInteger(1)); outputWriter = new CsvOutputWriter(resource, csvConfiguration, headers, columns); // write the headers } else { outputWriter = new CsvOutputWriter(resource, csvConfiguration, headers, columns); counters.get(qualifiedPath).incrementAndGet(); } } return outputWriter; }
final File file = ((FileResource) resource).getFile();
final File file = ((FileResource) resource).getFile();
/**
 * Opens the underlying resource as an {@link OPCPackage}. A local file is
 * opened directly; any other resource type is opened from its input stream.
 *
 * @return the opened package
 * @throws Exception if the package cannot be opened
 */
private OPCPackage openOPCPackage() throws Exception {
    if (!(_resource instanceof FileResource)) {
        return OPCPackage.open(_resource.read());
    }
    final File localFile = ((FileResource) _resource).getFile();
    return OPCPackage.open(localFile);
}
/**
 * Opens the resource as an {@link OPCPackage}, preferring direct file access
 * for local file resources and falling back to stream-based access otherwise.
 *
 * @return the opened package
 * @throws Exception if the package cannot be opened
 */
private OPCPackage openOPCPackage() throws Exception {
    if (_resource instanceof FileResource) {
        // Direct file access lets POI use random access on the archive.
        return OPCPackage.open(((FileResource) _resource).getFile());
    }
    return OPCPackage.open(_resource.read());
}
/** * Creates a {@link Resource} replacement to use for configured properties. * * @param resource * @param partitionNumber * @return a replacement resource, or null if it shouldn't be replaced */ private Resource createReplacementResource(final Resource resource, final int partitionNumber) { final String formattedPartitionNumber = String.format("%05d", partitionNumber); if (resource instanceof HdfsResource || resource instanceof HadoopResource) { final String path = resource.getQualifiedPath() + "/part-" + formattedPartitionNumber; final URI uri = URI.create(path); return HdfsHelper.createHelper().getResourceToUse(uri); } if (resource instanceof FileResource) { final File file = ((FileResource) resource).getFile(); if (file.exists() && file.isFile()) { // a file already exists - we cannot just create a directory // then return resource; } if (!file.exists()) { file.mkdirs(); } return new FileResource(resource.getQualifiedPath() + "/part-" + formattedPartitionNumber); } return null; }
/** * Callback method used by {@link CsvTableDropBuilder} when execute is * called */ protected void dropTable() { close(); if (_resource instanceof FileResource) { final File file = ((FileResource) _resource).getFile(); final boolean success = file.delete(); if (!success) { throw new MetaModelException("Could not delete (drop) file: " + file); } } else { _resource.write(new Action<OutputStream>() { @Override public void run(OutputStream arg) throws Exception { // do nothing, just write an empty file } }); } }
/** * Callback method used by {@link CsvTableDropBuilder} when execute is * called */ protected void dropTable() { close(); if (_resource instanceof FileResource) { final File file = ((FileResource) _resource).getFile(); final boolean success = file.delete(); if (!success) { throw new MetaModelException("Could not delete (drop) file: " + file); } } else { _resource.write(new Action<OutputStream>() { @Override public void run(OutputStream arg) throws Exception { // do nothing, just write an empty file } }); } }
final File file = ((FileResource) resource).getFile(); try {
public boolean isDirectory(final URI path) { final Resource resource = getResourceToUse(path); if (!resource.isExists()) { return false; } if (resource instanceof FileResource) { return ((FileResource) resource).getFile().isDirectory(); } if (resource instanceof HdfsResource) { final FileSystem fileSystem = ((HdfsResource) resource).getHadoopFileSystem(); final Path hadoopPath = ((HdfsResource) resource).getHadoopPath(); try { return fileSystem.isDirectory(hadoopPath); } catch (final IOException e) { throw new IllegalStateException(e); } } // actually we don't know, but most likely it's not a directory return false; } }
final File file = ((FileResource) resource).getFile(); try {
/**
 * Builds an {@link ActionListener} that, when fired, copies the current
 * analysis job, appends a write-data analyzer of the given type wired to all
 * available input columns, points it at a date-stamped output file, closes
 * this dialog, and runs the copied job.
 *
 * @param analyzerClass the analyzer type that will write the output data
 * @param filenameExtension extension for the generated output file
 * @return the listener to attach to the corresponding action/button
 */
private ActionListener createWriteDataActionListener(final Class<? extends Analyzer<?>> analyzerClass,
        final String filenameExtension) {
    return e -> {
        // Work on a copy so the user's open job builder is left untouched.
        final AnalysisJob copyAnalysisJob = _analysisJobBuilder.toAnalysisJob(false);
        final AnalysisJobBuilder copyAnalysisJobBuilder =
                new AnalysisJobBuilder(_analysisJobBuilder.getConfiguration(), copyAnalysisJob);
        final AnalyzerComponentBuilder<? extends Analyzer<?>> analyzer =
                copyAnalysisJobBuilder.addAnalyzer(analyzerClass);
        // Feed every available column into the writer.
        analyzer.addInputColumns(copyAnalysisJobBuilder.getAvailableInputColumns(Object.class));
        final String formattedDate = new SimpleDateFormat("yyyy-MM-dd").format(new Date());
        final FileResource resource =
                createResource("datacleaner-" + formattedDate + "-output", filenameExtension);
        if (analyzerClass == CreateExcelSpreadsheetAnalyzer.class) {
            // NOTE(review): the Excel analyzer's "File" property is set from a
            // java.io.File here rather than the Resource — presumably its descriptor
            // expects a File; confirm against CreateExcelSpreadsheetAnalyzer.
            final File file = resource.getFile();
            analyzer.setConfiguredProperty("File", file);
        } else {
            analyzer.setConfiguredProperty("File", resource);
        }
        // Not every analyzer has a sheet name; only set it when the property exists.
        final ConfiguredPropertyDescriptor sheetNameProperty =
                analyzer.getDescriptor().getConfiguredProperty("Sheet name");
        if (sheetNameProperty != null) {
            analyzer.setConfiguredProperty(sheetNameProperty, "data");
        }
        final RunAnalysisActionListener runAnalysis =
                new RunAnalysisActionListener(_dcModule, copyAnalysisJobBuilder);
        // Close the dialog before kicking off the analysis run.
        ExecuteJobWithoutAnalyzersDialog.this.close();
        runAnalysis.run();
    };
}
case SAS: final FileResource fileResource = (FileResource) resource; return new SasDatastore(datastoreName, fileResource.getFile()); case DBASE: return new DbaseDatastore(datastoreName, resource.getQualifiedPath());
final Resource resource = resourceDatastore.getResource(); if (resource instanceof FileResource) { final File file = ((FileResource) resource).getFile(); _filenameField.setFile(file);