/**
 * Registers each file from a comma-separated list with the remote Spark client,
 * uploading it to HDFS first when the local path is not reachable by the cluster.
 *
 * @param addedFiles comma-separated file paths/URIs; null or empty is a no-op
 * @throws IOException if uploading a file to HDFS fails
 */
private void addResources(String addedFiles) throws IOException {
  for (String addedFile : CSV_SPLITTER.split(Strings.nullToEmpty(addedFiles))) {
    try {
      URI fileUri = FileUtils.getURI(addedFile);
      if (fileUri != null && !localFiles.contains(fileUri)) {
        if (SparkUtilities.needUploadToHDFS(fileUri, sparkConf)) {
          fileUri = SparkUtilities.uploadToHDFS(fileUri, hiveConf);
        }
        // Cache the URI that was actually handed to the remote client (the
        // post-upload HDFS URI when an upload happened), so localFiles stays
        // consistent with what the remote side knows about. Previously the
        // pre-upload local URI was cached, making the set diverge.
        localFiles.add(fileUri);
        remoteClient.addFile(fileUri);
      }
    } catch (URISyntaxException e) {
      // Best-effort: skip the malformed entry but keep processing the rest.
      LOG.warn("Failed to add file:" + addedFile, e);
    }
  }
}
/**
 * Registers each jar from a comma-separated list with the remote Spark client,
 * uploading it to HDFS first when the local path is not reachable by the cluster.
 *
 * @param addedJars comma-separated jar paths/URIs; null or empty is a no-op
 * @throws IOException if uploading a jar to HDFS fails
 */
private void addJars(String addedJars) throws IOException {
  for (String addedJar : CSV_SPLITTER.split(Strings.nullToEmpty(addedJars))) {
    try {
      URI jarUri = FileUtils.getURI(addedJar);
      if (jarUri != null && !localJars.contains(jarUri)) {
        if (SparkUtilities.needUploadToHDFS(jarUri, sparkConf)) {
          jarUri = SparkUtilities.uploadToHDFS(jarUri, hiveConf);
        }
        // Cache the URI that was actually handed to the remote client (the
        // post-upload HDFS URI when an upload happened), so localJars stays
        // consistent with what the remote side knows about. Previously the
        // pre-upload local URI was cached, making the set diverge.
        localJars.add(jarUri);
        remoteClient.addJar(jarUri);
      }
    } catch (URISyntaxException e) {
      // Best-effort: skip the malformed entry but keep processing the rest.
      LOG.warn("Failed to add jar:" + addedJar, e);
    }
  }
}
/**
 * Registers each jar from a comma-separated list with the remote Spark client,
 * uploading it to HDFS first when the local path is not reachable by the cluster.
 *
 * @param addedJars comma-separated jar paths/URIs; null or empty is a no-op
 * @throws IOException if uploading a jar to HDFS fails
 */
private void addJars(String addedJars) throws IOException {
  for (String addedJar : CSV_SPLITTER.split(Strings.nullToEmpty(addedJars))) {
    try {
      URI jarUri = FileUtils.getURI(addedJar);
      if (jarUri != null && !localJars.contains(jarUri)) {
        if (SparkUtilities.needUploadToHDFS(jarUri, sparkConf)) {
          jarUri = SparkUtilities.uploadToHDFS(jarUri, hiveConf);
        }
        // Cache the URI that was actually handed to the remote client (the
        // post-upload HDFS URI when an upload happened), so localJars stays
        // consistent with what the remote side knows about. Previously the
        // pre-upload local URI was cached, making the set diverge.
        localJars.add(jarUri);
        remoteClient.addJar(jarUri);
      }
    } catch (URISyntaxException e) {
      // Best-effort: skip the malformed entry but keep processing the rest.
      LOG.warn("Failed to add jar:" + addedJar, e);
    }
  }
}
/**
 * Registers each file from a comma-separated list with the remote Spark client,
 * uploading it to HDFS first when the local path is not reachable by the cluster.
 *
 * @param addedFiles comma-separated file paths/URIs; null or empty is a no-op
 * @throws IOException if uploading a file to HDFS fails
 */
private void addResources(String addedFiles) throws IOException {
  for (String addedFile : CSV_SPLITTER.split(Strings.nullToEmpty(addedFiles))) {
    try {
      URI fileUri = FileUtils.getURI(addedFile);
      if (fileUri != null && !localFiles.contains(fileUri)) {
        if (SparkUtilities.needUploadToHDFS(fileUri, sparkConf)) {
          fileUri = SparkUtilities.uploadToHDFS(fileUri, hiveConf);
        }
        // Cache the URI that was actually handed to the remote client (the
        // post-upload HDFS URI when an upload happened), so localFiles stays
        // consistent with what the remote side knows about. Previously the
        // pre-upload local URI was cached, making the set diverge.
        localFiles.add(fileUri);
        remoteClient.addFile(fileUri);
      }
    } catch (URISyntaxException e) {
      // Best-effort: skip the malformed entry but keep processing the rest.
      LOG.warn("Failed to add file:" + addedFile, e);
    }
  }
}
/**
 * Adds each file from a comma-separated list to the Spark session via the
 * remote client, first uploading it to HDFS when the local path cannot be
 * read by the cluster.
 *
 * @param addedFiles comma-separated file paths/URIs; null or empty is a no-op
 * @throws IOException if uploading a file to HDFS fails
 */
private void addResources(String addedFiles) throws IOException {
  for (String path : CSV_SPLITTER.split(Strings.nullToEmpty(addedFiles))) {
    try {
      URI parsed = SparkUtilities.getURI(path);
      // Skip unparseable entries and files we have already registered.
      if (parsed == null || localFiles.contains(parsed)) {
        continue;
      }
      URI effective = SparkUtilities.needUploadToHDFS(parsed, sparkConf)
          ? SparkUtilities.uploadToHDFS(parsed, hiveConf)
          : parsed;
      localFiles.add(effective);
      remoteClient.addFile(effective);
    } catch (URISyntaxException e) {
      // Best-effort: warn and keep going with the remaining entries.
      LOG.warn("Failed to add file:" + path, e);
    }
  }
}
/**
 * Adds each jar from a comma-separated list to the Spark session via the
 * remote client, first uploading it to HDFS when the local path cannot be
 * read by the cluster.
 *
 * @param addedJars comma-separated jar paths/URIs; null or empty is a no-op
 * @throws IOException if uploading a jar to HDFS fails
 */
private void addJars(String addedJars) throws IOException {
  for (String path : CSV_SPLITTER.split(Strings.nullToEmpty(addedJars))) {
    try {
      URI parsed = SparkUtilities.getURI(path);
      // Skip unparseable entries and jars we have already registered.
      if (parsed == null || localJars.contains(parsed)) {
        continue;
      }
      URI effective = SparkUtilities.needUploadToHDFS(parsed, sparkConf)
          ? SparkUtilities.uploadToHDFS(parsed, hiveConf)
          : parsed;
      localJars.add(effective);
      remoteClient.addJar(effective);
    } catch (URISyntaxException e) {
      // Best-effort: warn and keep going with the remaining entries.
      LOG.warn("Failed to add jar:" + path, e);
    }
  }
}