@Override
public Configuration getConfiguration() {
    final Configuration configuration = new Configuration();
    final Map<String, File> configurationFiles = new HashMap<>();

    // Collect all XML files from the configured directories. Files are keyed
    // by name, so a file from an earlier directory wins over a later one.
    Arrays.stream(getDirectories()).map(File::new).filter(File::isDirectory).forEach(directory -> {
        final File[] files = directory.listFiles();
        assert files != null;
        Arrays.stream(files).filter(File::isFile)
                .filter(file -> !configurationFiles.containsKey(file.getName()))
                .filter(file -> FilenameUtils.getExtension(file.getName()).equalsIgnoreCase("xml"))
                .forEach(file -> configurationFiles.put(file.getName(), file));
    });

    if (configurationFiles.isEmpty()) {
        throw new IllegalStateException("Specified directories do not contain any Hadoop configuration files");
    }

    configurationFiles.values().stream().map(File::toURI).map(Path::new).forEach(configuration::addResource);
    return configuration;
}
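// A brief usage sketch, assuming the constructor seen in the snippets further
// below (name, description, one or more configuration directory paths); the
// path used here is only an example.
final DirectoryBasedHadoopClusterInformation cluster = new DirectoryBasedHadoopClusterInformation(
        "local", "local Hadoop configuration directory", "/etc/hadoop/conf");
final Configuration configuration = cluster.getConfiguration();
// Every *.xml file found in the directory has been added as a resource, so
// standard keys such as fs.defaultFS can now be read.
System.out.println(configuration.get("fs.defaultFS"));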
@Override
public Configuration getConfiguration() {
    try {
        return super.getConfiguration();
    } catch (final IllegalStateException e) {
        if (getDirectories().length == 0) {
            throw new IllegalStateException(
                    "None of the standard Hadoop environment variables (HADOOP_CONF_DIR, YARN_CONF_DIR) has been set.",
                    e);
        } else {
            throw e;
        }
    }
}
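// How getDirectories() ends up empty is not shown in this snippet; a plausible
// sketch, assuming the environment-based subclass resolves its directories
// from the standard Hadoop environment variables at construction time (the
// helper name below is hypothetical):
private static String[] getConfigurationDirectories() {
    final List<String> directories = new ArrayList<>();
    for (final String variable : new String[] { "HADOOP_CONF_DIR", "YARN_CONF_DIR" }) {
        final String value = System.getenv(variable);
        if (value != null) {
            directories.add(value);
        }
    }
    return directories.toArray(new String[directories.size()]);
}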
final String serverName = _server.getName();
if (serverName != null) {
    _nameTextField.setText(serverName);
    _nameTextField.setEnabled(false);
    final String description = _server.getDescription();
    if (description != null) {
        _descriptionTextField.setText(description);
    }
}

// Later, the new server entry is built from the field values and registered
// in the catalog.
final ServerInformation newServer = new DirectoryBasedHadoopClusterInformation(_nameTextField.getText(),
        _descriptionTextField.getText(), paths.toArray(new String[paths.size()]));
_serverInformationCatalog.addServerInformation(newServer);
servers.add(new DirectoryBasedHadoopClusterInformation("directory", "directory set up",
        "C:\\Users\\claudiap\\git\\vagrant-vms\\bigdatavm\\hadoop_conf"));
servers.add(new DirectConnectionHadoopClusterInformation("namenode", "directconnection",
        URI.create("hdfs://localhost:9000/"))); // placeholder URI; the original snippet is truncated here
private ServerInformation createHadoopClusterInformation(final HadoopClusterType hadoopClusterType,
        final String name, final String description) {
    final ServerInformation serverInformation;
    if (hadoopClusterType.getEnvironmentConfigured() != null) {
        serverInformation = new EnvironmentBasedHadoopClusterInformation(name, description);
    } else if (hadoopClusterType.getDirectories() != null) {
        final List<String> directoryList = hadoopClusterType.getDirectories().getDirectory();
        // TODO: Variable-thingy
        final String[] directories = directoryList.toArray(new String[directoryList.size()]);
        serverInformation = new DirectoryBasedHadoopClusterInformation(name, description, directories);
    } else if (hadoopClusterType.getNamenodeUrl() != null) {
        serverInformation = new DirectConnectionHadoopClusterInformation(name, description,
                URI.create(hadoopClusterType.getNamenodeUrl()));
    } else {
        throw new UnsupportedOperationException("Unsupported Hadoop cluster configuration method");
    }
    return serverInformation;
}
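// The three branches correspond to three alternative shapes of the cluster
// element in the XML configuration. A rough sketch of each; apart from
// <directories>/<directory>, which match the serialization code further
// below, the element names are assumptions inferred from the JAXB getters:
//
//   <hadoop-cluster> <environment-configured/> </hadoop-cluster>
//   <hadoop-cluster> <directories> <directory>/etc/hadoop/conf</directory> </directories> </hadoop-cluster>
//   <hadoop-cluster> <namenode-url>hdfs://localhost:9000/</namenode-url> </hadoop-cluster>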
private List<DirectoryPathPanel> getDirectoriesListPanel(final JPanel parent) {
    _pathPanels = new ArrayList<>();
    if (_server != null) {
        final String[] directories = _server.getDirectories();
        if (directories != null) {
            for (final String directory : directories) {
                final DirectoryPathPanel directoryPanel = new DirectoryPathPanel(new File(directory), parent);
                _pathPanels.add(directoryPanel);
            }
        } else {
            _pathPanels.add(new DirectoryPathPanel(null, parent));
        }
    } else {
        _pathPanels.add(new DirectoryPathPanel(null, parent));
    }
    return _pathPanels;
}
final Element directoriesElement = getDocument().createElement("directories");
hadoopClusterElement.appendChild(directoriesElement);
for (final String directory : directoryBasedHadoopClusterInformation.getDirectories()) {
    appendElement(directoriesElement, "directory", directory);
}
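// appendElement is assumed to be the usual DOM helper that creates a child
// element with the given tag name and text content; a minimal sketch, in case
// it is unfamiliar (the real implementation may differ):
private void appendElement(final Element parent, final String name, final String value) {
    final Element element = getDocument().createElement(name);
    element.setTextContent(value);
    parent.appendChild(element);
}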