/**
 * Splits {@code addedFiles} on the CSV separator and registers each entry that
 * has not been seen before, both in the local tracking collection and with the
 * Spark context. A {@code null} argument is treated as the empty string.
 */
private void addResources(String addedFiles) {
  Iterable<String> entries = CSV_SPLITTER.split(Strings.nullToEmpty(addedFiles));
  for (String entry : entries) {
    boolean alreadyKnown = localFiles.contains(entry);
    if (!alreadyKnown) {
      localFiles.add(entry);
      sc.addFile(entry);
    }
  }
}
/**
 * Job body run with the remote {@link JobContext}: adds {@code path} to the
 * underlying SparkContext via {@code addFile}. Returns {@code null} because
 * the job is executed only for this side effect.
 */
@Override
public Serializable call(JobContext jc) throws Exception {
  jc.sc().addFile(path);
  return null;
}
/**
 * Registers every not-yet-tracked file named in the CSV string {@code addedFiles}
 * with both the local file list and the Spark context ({@code null} input is
 * normalized to the empty string and yields no entries).
 */
private void addResources(String addedFiles) {
  String csv = Strings.nullToEmpty(addedFiles);
  for (String candidate : CSV_SPLITTER.split(csv)) {
    if (!localFiles.contains(candidate)) {
      localFiles.add(candidate);
      sc.addFile(candidate);
    }
  }
}
sc.addFile(distScript); JavaRDD<String> pipeInputs = contactsContactLists.values().map(new VerifyCallLogs()).flatMap( new FlatMapFunction<CallLog[], String>() { public Iterable<String> call(CallLog[] calls) {
/**
 * Job body run with the remote {@link JobContext}: registers {@code path} with
 * the SparkContext through {@code addFile}. The {@code null} return signals
 * there is no result payload; only the side effect matters.
 */
@Override
public Serializable call(JobContext jc) throws Exception {
  jc.sc().addFile(path);
  return null;
}
/**
 * Executes on the driver via the supplied {@link JobContext}: hands {@code path}
 * to {@code SparkContext.addFile}. Always returns {@code null} — callers ignore
 * the result and rely solely on the registration side effect.
 */
@Override
public Serializable call(JobContext jc) throws Exception {
  jc.sc().addFile(path);
  return null;
}
/**
 * Thin wrapper that forwards {@code path} to {@code SparkContext.addFile} on the
 * job context held by this instance.
 *
 * @param path location of the file to register with the Spark context
 */
protected void addFile(String path) {
  jc.sc().addFile(path);
}
/**
 * Walks the CSV-separated names in {@code addedFiles} (null-safe) and, for each
 * name not yet present in {@code localFiles}, records it locally and registers
 * it with the Spark context.
 */
private void addResources(String addedFiles) {
  for (String resource : CSV_SPLITTER.split(Strings.nullToEmpty(addedFiles))) {
    if (localFiles.contains(resource)) {
      continue; // already tracked and registered
    }
    localFiles.add(resource);
    sc.addFile(resource);
  }
}
.toExternalForm()); }else if( resourceType == ResourceType.FILE){ sparkContext.addFile(resourcePath.toURI().toURL() .toExternalForm());
/**
 * Ships every Hadoop DistributedCache entry to Spark via
 * {@code SparkContext.addFile}.
 *
 * <p>Spark's addFile expects a single file, so a cache entry that is a directory
 * is first merged into a sibling file named {@code "sparkreadable-" + name} with
 * {@link FileUtil#copyMerge}; the cache-file list in {@code conf} is then rewritten
 * to point at the URIs actually shipped.
 *
 * @throws RuntimeException wrapping any {@link IOException} raised while reading,
 *     merging, or re-registering cache files
 */
private void distributeFiles() {
  try {
    URI[] uris = DistributedCache.getCacheFiles(conf);
    if (uris != null) {
      URI[] outURIs = new URI[uris.length];
      for (int i = 0; i < uris.length; i++) {
        Path path = new Path(uris[i]);
        FileSystem fs = path.getFileSystem(conf);
        if (fs.isFile(path)) {
          outURIs[i] = uris[i];
        } else {
          // Directories cannot be added directly: merge contents into one file.
          Path mergePath = new Path(path.getParent(), "sparkreadable-" + path.getName());
          FileUtil.copyMerge(fs, path, fs, mergePath, false, conf, "");
          outURIs[i] = mergePath.toUri();
        }
        sparkContext.addFile(outURIs[i].toString());
      }
      // Re-point the cache at the (possibly merged) URIs that were shipped.
      DistributedCache.setCacheFiles(outURIs, conf);
    }
  } catch (IOException e) {
    // Fix: the old message claimed only "retrieving" failed, but this catch also
    // covers merge and registration failures; keep the cause for diagnosis.
    throw new RuntimeException("Error distributing cache files", e);
  }
}
/**
 * Makes the Hadoop DistributedCache entries available to Spark. File entries are
 * registered as-is; directory entries are first merged into a single
 * {@code "sparkreadable-"}-prefixed sibling file before registration, and the
 * cache-file list in {@code conf} is updated to the URIs actually shipped.
 */
private void distributeFiles() {
  try {
    URI[] cacheUris = DistributedCache.getCacheFiles(conf);
    if (cacheUris == null) {
      return; // nothing cached, nothing to distribute
    }
    URI[] shipped = new URI[cacheUris.length];
    for (int idx = 0; idx < cacheUris.length; idx++) {
      Path src = new Path(cacheUris[idx]);
      FileSystem fs = src.getFileSystem(conf);
      if (!fs.isFile(src)) {
        // Merge the directory into one file that Spark's addFile can accept.
        Path merged = new Path(src.getParent(), "sparkreadable-" + src.getName());
        FileUtil.copyMerge(fs, src, fs, merged, false, conf, "");
        shipped[idx] = merged.toUri();
      } else {
        shipped[idx] = cacheUris[idx];
      }
      sparkContext.addFile(shipped[idx].toString());
    }
    DistributedCache.setCacheFiles(shipped, conf);
  } catch (IOException e) {
    throw new RuntimeException("Error retrieving cache files", e);
  }
}