// NOTE(review): incomplete fragment — the enclosing method and the closing braces lie outside
// this view, so only comments are added here.
// Builds the blob server address from the gateway's hostname and the resolved port, then uploads
// the job graph's files through a freshly created BlobClient. An upload failure (FlinkException)
// is rethrown as an unchecked CompletionException — presumably to complete an enclosing
// CompletableFuture stage exceptionally; TODO confirm against the surrounding method.
final InetSocketAddress address = new InetSocketAddress(gateway.getHostname(), blobServerPort); try { ClientUtils.extractAndUploadJobGraphFiles(jobGraph, () -> new BlobClient(address, configuration)); } catch (FlinkException e) { throw new CompletionException(e);
/**
 * Uploads the given user artifacts with the supplied {@link BlobClient} and registers the
 * resulting blob keys on the given {@link JobGraph}.
 *
 * @param jobGraph jobgraph requiring user artifacts
 * @param artifactPaths artifacts to upload
 * @param blobClient client to upload artifacts with
 * @throws IOException if the upload fails
 */
private static void uploadAndSetUserArtifacts(JobGraph jobGraph, Collection<Tuple2<String, Path>> artifactPaths, BlobClient blobClient) throws IOException {
	final Collection<Tuple2<String, PermanentBlobKey>> uploadedArtifactKeys =
		uploadUserArtifacts(jobGraph.getJobID(), artifactPaths, blobClient);
	setUserArtifactBlobKeys(jobGraph, uploadedArtifactKeys);
}
/**
 * Uploads the given user jars with the supplied {@link BlobClient} and registers the resulting
 * blob keys on the given {@link JobGraph}.
 *
 * @param jobGraph jobgraph requiring user jars
 * @param userJars jars to upload
 * @param blobClient client to upload jars with
 * @throws IOException if the upload fails
 */
private static void uploadAndSetUserJars(JobGraph jobGraph, Collection<Path> userJars, BlobClient blobClient) throws IOException {
	// upload first, then attach the returned keys to the job graph
	setUserJarBlobKeys(uploadUserJars(jobGraph.getJobID(), userJars, blobClient), jobGraph);
}
/**
 * Uploads the given jars and artifacts required for the execution of the given {@link JobGraph}
 * using the {@link BlobClient} obtained from the given {@link Supplier}.
 *
 * @param jobGraph jobgraph requiring files
 * @param userJars jars to upload
 * @param userArtifacts artifacts to upload
 * @param clientSupplier supplier of blob client to upload files with
 * @throws FlinkException if the upload fails
 */
public static void uploadJobGraphFiles(
		JobGraph jobGraph,
		Collection<Path> userJars,
		Collection<Tuple2<String, Path>> userArtifacts,
		SupplierWithException<BlobClient, IOException> clientSupplier) throws FlinkException {
	if (userJars.isEmpty() && userArtifacts.isEmpty()) {
		// nothing to upload; avoid opening a blob client connection
		return;
	}
	try (BlobClient client = clientSupplier.get()) {
		uploadAndSetUserJars(jobGraph, userJars, client);
		uploadAndSetUserArtifacts(jobGraph, userArtifacts, client);
	} catch (IOException ioe) {
		throw new FlinkException("Could not upload job files.", ioe);
	}
}
/**
 * Extracts all files required for the execution from the given {@link JobGraph} and uploads them
 * using the {@link BlobClient} obtained from the given {@link Supplier}.
 *
 * @param jobGraph jobgraph requiring files
 * @param clientSupplier supplier of blob client to upload files with
 * @throws FlinkException if the upload fails
 */
public static void extractAndUploadJobGraphFiles(JobGraph jobGraph, SupplierWithException<BlobClient, IOException> clientSupplier) throws FlinkException {
	// convert the (name -> cache entry) map into (name, path) pairs for the uploader
	final Collection<Tuple2<String, Path>> userArtifacts =
		jobGraph.getUserArtifacts().entrySet().stream()
			.map(artifactEntry -> Tuple2.of(artifactEntry.getKey(), new Path(artifactEntry.getValue().filePath)))
			.collect(Collectors.toList());
	uploadJobGraphFiles(jobGraph, jobGraph.getUserJars(), userArtifacts, clientSupplier);
}
/**
 * Uploads the given user artifacts with the supplied {@link BlobClient} and registers the
 * resulting blob keys on the given {@link JobGraph}.
 *
 * @param jobGraph jobgraph requiring user artifacts
 * @param artifactPaths artifacts to upload
 * @param blobClient client to upload artifacts with
 * @throws IOException if the upload fails
 */
private static void uploadAndSetUserArtifacts(JobGraph jobGraph, Collection<Tuple2<String, Path>> artifactPaths, BlobClient blobClient) throws IOException {
	final Collection<Tuple2<String, PermanentBlobKey>> uploadedArtifactKeys =
		uploadUserArtifacts(jobGraph.getJobID(), artifactPaths, blobClient);
	setUserArtifactBlobKeys(jobGraph, uploadedArtifactKeys);
}
/**
 * Uploads the given user jars with the supplied {@link BlobClient} and registers the resulting
 * blob keys on the given {@link JobGraph}.
 *
 * @param jobGraph jobgraph requiring user jars
 * @param userJars jars to upload
 * @param blobClient client to upload jars with
 * @throws IOException if the upload fails
 */
private static void uploadAndSetUserJars(JobGraph jobGraph, Collection<Path> userJars, BlobClient blobClient) throws IOException {
	// upload first, then attach the returned keys to the job graph
	setUserJarBlobKeys(uploadUserJars(jobGraph.getJobID(), userJars, blobClient), jobGraph);
}
/**
 * Uploads the given jars and artifacts required for the execution of the given {@link JobGraph}
 * using the {@link BlobClient} obtained from the given {@link Supplier}.
 *
 * @param jobGraph jobgraph requiring files
 * @param userJars jars to upload
 * @param userArtifacts artifacts to upload
 * @param clientSupplier supplier of blob client to upload files with
 * @throws FlinkException if the upload fails
 */
public static void uploadJobGraphFiles(
		JobGraph jobGraph,
		Collection<Path> userJars,
		Collection<Tuple2<String, Path>> userArtifacts,
		SupplierWithException<BlobClient, IOException> clientSupplier) throws FlinkException {
	if (userJars.isEmpty() && userArtifacts.isEmpty()) {
		// nothing to upload; avoid opening a blob client connection
		return;
	}
	try (BlobClient client = clientSupplier.get()) {
		uploadAndSetUserJars(jobGraph, userJars, client);
		uploadAndSetUserArtifacts(jobGraph, userArtifacts, client);
	} catch (IOException ioe) {
		throw new FlinkException("Could not upload job files.", ioe);
	}
}
/**
 * Extracts all files required for the execution from the given {@link JobGraph} and uploads them
 * using the {@link BlobClient} obtained from the given {@link Supplier}.
 *
 * @param jobGraph jobgraph requiring files
 * @param clientSupplier supplier of blob client to upload files with
 * @throws FlinkException if the upload fails
 */
public static void extractAndUploadJobGraphFiles(JobGraph jobGraph, SupplierWithException<BlobClient, IOException> clientSupplier) throws FlinkException {
	// convert the (name -> cache entry) map into (name, path) pairs for the uploader
	final Collection<Tuple2<String, Path>> userArtifacts =
		jobGraph.getUserArtifacts().entrySet().stream()
			.map(artifactEntry -> Tuple2.of(artifactEntry.getKey(), new Path(artifactEntry.getValue().filePath)))
			.collect(Collectors.toList());
	uploadJobGraphFiles(jobGraph, jobGraph.getUserJars(), userArtifacts, clientSupplier);
}
/**
 * Once the blob server address is known, uploads the given job's files to it and registers the
 * resulting blob keys on the job graph.
 *
 * @param blobServerAddressFuture future resolving to the blob server's address
 * @param job job graph whose files should be uploaded
 * @return future that completes when the upload has finished
 */
private CompletableFuture<Void> uploadAndSetJobFiles(final CompletableFuture<InetSocketAddress> blobServerAddressFuture, final JobGraph job) {
	return blobServerAddressFuture.thenAccept(address -> {
		try {
			ClientUtils.extractAndUploadJobGraphFiles(
				job, () -> new BlobClient(address, miniClusterConfiguration.getConfiguration()));
		} catch (FlinkException e) {
			// rethrow unchecked so the returned future completes exceptionally
			throw new CompletionException(e);
		}
	});
}
/**
 * Uploads the given user artifacts with the supplied {@link BlobClient} and registers the
 * resulting blob keys on the given {@link JobGraph}.
 *
 * @param jobGraph jobgraph requiring user artifacts
 * @param artifactPaths artifacts to upload
 * @param blobClient client to upload artifacts with
 * @throws IOException if the upload fails
 */
private static void uploadAndSetUserArtifacts(JobGraph jobGraph, Collection<Tuple2<String, Path>> artifactPaths, BlobClient blobClient) throws IOException {
	final Collection<Tuple2<String, PermanentBlobKey>> uploadedArtifactKeys =
		uploadUserArtifacts(jobGraph.getJobID(), artifactPaths, blobClient);
	setUserArtifactBlobKeys(jobGraph, uploadedArtifactKeys);
}
/**
 * Uploads the given user jars with the supplied {@link BlobClient} and registers the resulting
 * blob keys on the given {@link JobGraph}.
 *
 * @param jobGraph jobgraph requiring user jars
 * @param userJars jars to upload
 * @param blobClient client to upload jars with
 * @throws IOException if the upload fails
 */
private static void uploadAndSetUserJars(JobGraph jobGraph, Collection<Path> userJars, BlobClient blobClient) throws IOException {
	// upload first, then attach the returned keys to the job graph
	setUserJarBlobKeys(uploadUserJars(jobGraph.getJobID(), userJars, blobClient), jobGraph);
}
/**
 * Uploads the given jars and artifacts required for the execution of the given {@link JobGraph}
 * using the {@link BlobClient} obtained from the given {@link Supplier}.
 *
 * @param jobGraph jobgraph requiring files
 * @param userJars jars to upload
 * @param userArtifacts artifacts to upload
 * @param clientSupplier supplier of blob client to upload files with
 * @throws FlinkException if the upload fails
 */
public static void uploadJobGraphFiles(
		JobGraph jobGraph,
		Collection<Path> userJars,
		Collection<Tuple2<String, Path>> userArtifacts,
		SupplierWithException<BlobClient, IOException> clientSupplier) throws FlinkException {
	if (userJars.isEmpty() && userArtifacts.isEmpty()) {
		// nothing to upload; avoid opening a blob client connection
		return;
	}
	try (BlobClient client = clientSupplier.get()) {
		uploadAndSetUserJars(jobGraph, userJars, client);
		uploadAndSetUserArtifacts(jobGraph, userArtifacts, client);
	} catch (IOException ioe) {
		throw new FlinkException("Could not upload job files.", ioe);
	}
}
/**
 * Extracts all files required for the execution from the given {@link JobGraph} and uploads them
 * using the {@link BlobClient} obtained from the given {@link Supplier}.
 *
 * @param jobGraph jobgraph requiring files
 * @param clientSupplier supplier of blob client to upload files with
 * @throws FlinkException if the upload fails
 */
public static void extractAndUploadJobGraphFiles(JobGraph jobGraph, SupplierWithException<BlobClient, IOException> clientSupplier) throws FlinkException {
	// convert the (name -> cache entry) map into (name, path) pairs for the uploader
	final Collection<Tuple2<String, Path>> userArtifacts =
		jobGraph.getUserArtifacts().entrySet().stream()
			.map(artifactEntry -> Tuple2.of(artifactEntry.getKey(), new Path(artifactEntry.getValue().filePath)))
			.collect(Collectors.toList());
	uploadJobGraphFiles(jobGraph, jobGraph.getUserJars(), userArtifacts, clientSupplier);
}
/**
 * Once the blob server address is known, uploads the given job's files to it and registers the
 * resulting blob keys on the job graph.
 *
 * @param blobServerAddressFuture future resolving to the blob server's address
 * @param job job graph whose files should be uploaded
 * @return future that completes when the upload has finished
 */
private CompletableFuture<Void> uploadAndSetJobFiles(final CompletableFuture<InetSocketAddress> blobServerAddressFuture, final JobGraph job) {
	return blobServerAddressFuture.thenAccept(address -> {
		try {
			ClientUtils.extractAndUploadJobGraphFiles(
				job, () -> new BlobClient(address, miniClusterConfiguration.getConfiguration()));
		} catch (FlinkException e) {
			// rethrow unchecked so the returned future completes exceptionally
			throw new CompletionException(e);
		}
	});
}
private CompletableFuture<JobGraph> uploadJobGraphFiles( DispatcherGateway gateway, CompletableFuture<JobGraph> jobGraphFuture, Collection<Path> jarFiles, Collection<Tuple2<String, Path>> artifacts, Configuration configuration) { CompletableFuture<Integer> blobServerPortFuture = gateway.getBlobServerPort(timeout); return jobGraphFuture.thenCombine(blobServerPortFuture, (JobGraph jobGraph, Integer blobServerPort) -> { final InetSocketAddress address = new InetSocketAddress(gateway.getHostname(), blobServerPort); try { // Add users jars located in dfs for (Path path : jobGraph.getUserJars()) { if (path.getFileSystem().isDistributedFS()) { jarFiles.add(path); } } log.info("Uploading jarFiles {} and userArtifacts {} to blob server", jarFiles.toString(), artifacts.toString()); ClientUtils.uploadJobGraphFiles(jobGraph, jarFiles, artifacts, () -> new BlobClient(address, configuration)); } catch (IOException | FlinkException e) { throw new CompletionException(new RestHandlerException( "Could not upload job files.", HttpResponseStatus.INTERNAL_SERVER_ERROR, e)); } return jobGraph; }); }
/**
 * Once the blob server address is known, uploads the given job's files to it and registers the
 * resulting blob keys on the job graph.
 *
 * @param blobServerAddressFuture future resolving to the blob server's address
 * @param job job graph whose files should be uploaded
 * @return future that completes when the upload has finished
 */
private CompletableFuture<Void> uploadAndSetJobFiles(final CompletableFuture<InetSocketAddress> blobServerAddressFuture, final JobGraph job) {
	return blobServerAddressFuture.thenAccept(address -> {
		try {
			ClientUtils.extractAndUploadJobGraphFiles(
				job, () -> new BlobClient(address, miniClusterConfiguration.getConfiguration()));
		} catch (FlinkException e) {
			// rethrow unchecked so the returned future completes exceptionally
			throw new CompletionException(e);
		}
	});
}
/**
 * Uploads the given jar files and user artifacts to the dispatcher's blob server once both the
 * job graph and the blob server port are available.
 *
 * @param gateway dispatcher gateway used to resolve the blob server host/port
 * @param jobGraphFuture future resolving to the job graph to upload files for
 * @param jarFiles jar files to upload
 * @param artifacts (name, path) pairs of user artifacts to upload
 * @param configuration configuration for the {@link BlobClient}
 * @return future resolving to the job graph with blob keys set; completes exceptionally with a
 *         {@link CompletionException} wrapping a {@link RestHandlerException} if the upload fails
 */
private CompletableFuture<JobGraph> uploadJobGraphFiles(
		DispatcherGateway gateway,
		CompletableFuture<JobGraph> jobGraphFuture,
		Collection<Path> jarFiles,
		Collection<Tuple2<String, Path>> artifacts,
		Configuration configuration) {
	final CompletableFuture<Integer> blobServerPortFuture = gateway.getBlobServerPort(timeout);

	return jobGraphFuture.thenCombine(blobServerPortFuture, (jobGraph, blobServerPort) -> {
		final InetSocketAddress blobServerAddress = new InetSocketAddress(gateway.getHostname(), blobServerPort);
		try {
			ClientUtils.uploadJobGraphFiles(
				jobGraph, jarFiles, artifacts, () -> new BlobClient(blobServerAddress, configuration));
		} catch (FlinkException e) {
			// surface as an HTTP 500 to the REST client
			throw new CompletionException(new RestHandlerException(
				"Could not upload job files.",
				HttpResponseStatus.INTERNAL_SERVER_ERROR,
				e));
		}
		return jobGraph;
	});
}
// NOTE(review): incomplete fragment — the enclosing method and the closing braces are outside
// this view, so only comments are added here.
// Builds the blob server address from the gateway's hostname and the resolved port, then uploads
// the job graph's files through a freshly created BlobClient. An upload failure (FlinkException)
// is rethrown as an unchecked CompletionException — presumably to complete an enclosing
// CompletableFuture stage exceptionally; TODO confirm against the surrounding method.
final InetSocketAddress address = new InetSocketAddress(gateway.getHostname(), blobServerPort); try { ClientUtils.extractAndUploadJobGraphFiles(jobGraph, () -> new BlobClient(address, configuration)); } catch (FlinkException e) { throw new CompletionException(e);
/**
 * Uploads the given jar files and user artifacts to the dispatcher's blob server once both the
 * job graph and the blob server port are available.
 *
 * @param gateway dispatcher gateway used to resolve the blob server host/port
 * @param jobGraphFuture future resolving to the job graph to upload files for
 * @param jarFiles jar files to upload
 * @param artifacts (name, path) pairs of user artifacts to upload
 * @param configuration configuration for the {@link BlobClient}
 * @return future resolving to the job graph with blob keys set; completes exceptionally with a
 *         {@link CompletionException} wrapping a {@link RestHandlerException} if the upload fails
 */
private CompletableFuture<JobGraph> uploadJobGraphFiles(
		DispatcherGateway gateway,
		CompletableFuture<JobGraph> jobGraphFuture,
		Collection<Path> jarFiles,
		Collection<Tuple2<String, Path>> artifacts,
		Configuration configuration) {
	final CompletableFuture<Integer> blobServerPortFuture = gateway.getBlobServerPort(timeout);

	return jobGraphFuture.thenCombine(blobServerPortFuture, (jobGraph, blobServerPort) -> {
		final InetSocketAddress blobServerAddress = new InetSocketAddress(gateway.getHostname(), blobServerPort);
		try {
			ClientUtils.uploadJobGraphFiles(
				jobGraph, jarFiles, artifacts, () -> new BlobClient(blobServerAddress, configuration));
		} catch (FlinkException e) {
			// surface as an HTTP 500 to the REST client
			throw new CompletionException(new RestHandlerException(
				"Could not upload job files.",
				HttpResponseStatus.INTERNAL_SERVER_ERROR,
				e));
		}
		return jobGraph;
	});
}