// For every user jar attached to the job graph: record its file name and queue the local
// file for upload to the REST endpoint with the JAR content type.
// NOTE(review): snippet is truncated here — the loop's closing braces and the declarations of
// jarFileNames/filesToUpload are outside this view.
for (Path jar : jobGraph.getUserJars()) { jarFileNames.add(jar.getName()); filesToUpload.add(new FileUpload(Paths.get(jar.toUri()), RestConstants.CONTENT_TYPE_JAR));
// Convert each user-jar Flink Path from the job graph into a java.io.File handle via its URI.
// NOTE(review): snippet is truncated — the enclosing method and closing braces are not visible;
// presumably these paths are local-FS paths, or File construction would be meaningless — confirm.
for (org.apache.flink.core.fs.Path path : jobGraph.getUserJars()) { userJarFiles.add(new File(path.toUri()));
/**
 * Extracts all files required for the execution from the given {@link JobGraph} and uploads
 * them using the {@link BlobClient} from the given {@link SupplierWithException}.
 *
 * @param jobGraph jobgraph requiring files
 * @param clientSupplier supplier of blob client to upload files with
 * @throws FlinkException if the upload fails
 */
public static void extractAndUploadJobGraphFiles(
        JobGraph jobGraph,
        SupplierWithException<BlobClient, IOException> clientSupplier) throws FlinkException {
    // User jars are taken as-is; user artifacts are flattened into (name, path) pairs,
    // wrapping each artifact's file path in a Flink Path.
    List<Path> userJars = jobGraph.getUserJars();
    Collection<Tuple2<String, Path>> userArtifacts = jobGraph.getUserArtifacts().entrySet().stream()
            .map(entry -> Tuple2.of(entry.getKey(), new Path(entry.getValue().filePath)))
            .collect(Collectors.toList());

    uploadJobGraphFiles(jobGraph, userJars, userArtifacts, clientSupplier);
}
/**
 * Extracts all files required for the execution from the given {@link JobGraph} and uploads
 * them using the {@link BlobClient} from the given {@link SupplierWithException}.
 *
 * @param jobGraph jobgraph requiring files
 * @param clientSupplier supplier of blob client to upload files with
 * @throws FlinkException if the upload fails
 */
public static void extractAndUploadJobGraphFiles(
        JobGraph jobGraph,
        SupplierWithException<BlobClient, IOException> clientSupplier) throws FlinkException {
    // User jars are taken as-is; user artifacts are flattened into (name, path) pairs,
    // wrapping each artifact's file path in a Flink Path.
    List<Path> userJars = jobGraph.getUserJars();
    Collection<Tuple2<String, Path>> userArtifacts = jobGraph.getUserArtifacts().entrySet().stream()
            .map(entry -> Tuple2.of(entry.getKey(), new Path(entry.getValue().filePath)))
            .collect(Collectors.toList());

    uploadJobGraphFiles(jobGraph, userJars, userArtifacts, clientSupplier);
}
/**
 * Extracts all files required for the execution from the given {@link JobGraph} and uploads
 * them using the {@link BlobClient} from the given {@link SupplierWithException}.
 *
 * @param jobGraph jobgraph requiring files
 * @param clientSupplier supplier of blob client to upload files with
 * @throws FlinkException if the upload fails
 */
public static void extractAndUploadJobGraphFiles(
        JobGraph jobGraph,
        SupplierWithException<BlobClient, IOException> clientSupplier) throws FlinkException {
    // User jars are taken as-is; user artifacts are flattened into (name, path) pairs,
    // wrapping each artifact's file path in a Flink Path.
    List<Path> userJars = jobGraph.getUserJars();
    Collection<Tuple2<String, Path>> userArtifacts = jobGraph.getUserArtifacts().entrySet().stream()
            .map(entry -> Tuple2.of(entry.getKey(), new Path(entry.getValue().filePath)))
            .collect(Collectors.toList());

    uploadJobGraphFiles(jobGraph, userJars, userArtifacts, clientSupplier);
}
private CompletableFuture<JobGraph> uploadJobGraphFiles( DispatcherGateway gateway, CompletableFuture<JobGraph> jobGraphFuture, Collection<Path> jarFiles, Collection<Tuple2<String, Path>> artifacts, Configuration configuration) { CompletableFuture<Integer> blobServerPortFuture = gateway.getBlobServerPort(timeout); return jobGraphFuture.thenCombine(blobServerPortFuture, (JobGraph jobGraph, Integer blobServerPort) -> { final InetSocketAddress address = new InetSocketAddress(gateway.getHostname(), blobServerPort); try { // Add users jars located in dfs for (Path path : jobGraph.getUserJars()) { if (path.getFileSystem().isDistributedFS()) { jarFiles.add(path); } } log.info("Uploading jarFiles {} and userArtifacts {} to blob server", jarFiles.toString(), artifacts.toString()); ClientUtils.uploadJobGraphFiles(jobGraph, jarFiles, artifacts, () -> new BlobClient(address, configuration)); } catch (IOException | FlinkException e) { throw new CompletionException(new RestHandlerException( "Could not upload job files.", HttpResponseStatus.INTERNAL_SERVER_ERROR, e)); } return jobGraph; }); }
// For every user jar attached to the job graph: record its file name and queue the local
// file for upload to the REST endpoint with the JAR content type.
// NOTE(review): snippet is truncated here — the loop's closing braces and the declarations of
// jarFileNames/filesToUpload are outside this view.
for (Path jar : jobGraph.getUserJars()) { jarFileNames.add(jar.getName()); filesToUpload.add(new FileUpload(Paths.get(jar.toUri()), RestConstants.CONTENT_TYPE_JAR));
// Convert each user-jar Flink Path from the job graph into a java.io.File handle via its URI.
// NOTE(review): snippet is truncated — the enclosing method and closing braces are not visible;
// presumably these paths are local-FS paths, or File construction would be meaningless — confirm.
for (org.apache.flink.core.fs.Path path : jobGraph.getUserJars()) { userJarFiles.add(new File(path.toUri()));
// Upload disabled: just log and fall through. Otherwise iterate the job graph's user jars and
// handle only those NOT on a distributed file system — presumably because distributed-FS paths
// are already reachable by the cluster without an upload (TODO confirm against caller).
// NOTE(review): snippet is truncated — the try body and closing braces are outside this view.
log.info("Uploading user-jars is disabled"); } else { for (Path jar : jobGraph.getUserJars()) { try { if (!jar.getFileSystem().isDistributedFS()) {
// Convert each user-jar Flink Path from the job graph into a java.io.File handle via its URI.
// NOTE(review): snippet is truncated — the enclosing method and closing braces are not visible;
// presumably these paths are local-FS paths, or File construction would be meaningless — confirm.
for (org.apache.flink.core.fs.Path path : jobGraph.getUserJars()) { userJarFiles.add(new File(path.toUri()));