Path logdir = new Path(conf.get(XLearningConfiguration.XLEARNING_HISTORY_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_HISTORY_LOG_DIR) + "/" + applicationAttemptID.getApplicationId().toString() + "/" + applicationAttemptID.getApplicationId().toString()); FileSystem fs = FileSystem.get(xlearningConf); FSDataOutputStream out = fs.create(jobLogPath); fs.setPermission(jobLogPath, new FsPermission(LOG_FILE_PERMISSION)); if (conf.getBoolean(XLearningConfiguration.XLEARNING_HOST_LOCAL_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_HOST_LOCAL_ENABLE)) { Path hostLocaldir = new Path(conf.get(XLearningConfiguration.XLEARNING_HISTORY_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_HISTORY_DIR) + "/" + applicationAttemptID.getApplicationId().toString()); Map<String, String> containerMessage = new HashMap<>(); containerMessage.put(AMParams.CONTAINER_HTTP_ADDRESS, container.getNodeHttpAddress()); if (tfEvaluator && container.getId().toString().equals(tfEvaluatorContainerId)) { containerMessage.put(AMParams.CONTAINER_ROLE, XLearningConstants.EVALUATOR); } else { container.getId().toString(), userName)); logMessage.put(container.getId().toString(), containerMessage);
conf.addResource(new Path(XLearningConstants.XLEARNING_JOB_CONFIGURATION)); System.setProperty(XLearningConstants.Environment.HADOOP_USER_NAME.toString(), conf.get("hadoop.job.ugi").split(",")[0]); outputInfos = new ArrayList<>(); input2FileStatus = new ConcurrentHashMap<>(); inputFileSplits = null; containerId2InputSplit = new ConcurrentHashMap<>(); statusUpdateInterval = conf.getInt(XLearningConfiguration.XLEARNING_STATUS_UPDATE_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_STATUS_PULL_INTERVAL); applicationAttemptID = Records.newRecord(ApplicationAttemptId.class); applicationMessageQueue = new LinkedBlockingQueue<>( + applicationAttemptID.getApplicationId().getId() + ", clustertimestamp=" + applicationAttemptID.getApplicationId().getClusterTimestamp() + ", attemptId=" + applicationAttemptID.getAttemptId()); if (applicationAttemptID.getAttemptId() > 1 && (conf.getInt(XLearningConfiguration.XLEARNING_APP_MAX_ATTEMPTS, XLearningConfiguration.DEFAULT_XLEARNING_APP_MAX_ATTEMPTS) > 1)) { appJarRemoteLocation = new Path(envs.get(XLearningConstants.Environment.APP_JAR_LOCATION.toString())); LOG.info("Application jar location: " + appJarRemoteLocation); appConfRemoteLocation = new Path(envs.get(XLearningConstants.Environment.XLEARNING_JOB_CONF_LOCATION.toString())); LOG.info("Application conf location: " + appConfRemoteLocation);
/**
 * Creates a factory whose attempt id mirrors {@code appAttemptId} but whose
 * application id is rebuilt with {@code appIdLong} as the cluster timestamp
 * (per {@code ApplicationId.newInstance(clusterTimestamp, id)}).
 * Container ids handed out start counting from 1.
 */
public ContainerFactory(ApplicationAttemptId appAttemptId, long appIdLong) {
  this.nextId = new AtomicLong(1);
  final int appNumericId = appAttemptId.getApplicationId().getId();
  final ApplicationId syntheticAppId = ApplicationId.newInstance(appIdLong, appNumericId);
  this.customAppAttemptId =
      ApplicationAttemptId.newInstance(syntheticAppId, appAttemptId.getAttemptId());
}
/**
 * Serializes this object's fields to {@code out} in a fixed order: app-id
 * cluster timestamp, app-id numeric id, the token, creation time, vertex
 * parallelism, then the two optional byte arrays, each written as a length
 * prefix followed by the bytes.
 *
 * <p>NOTE(review): a null array and an empty array both serialize as length
 * 0, so the reader presumably cannot distinguish them — confirm the read
 * side treats length 0 as "absent".
 *
 * @param out sink to write the serialized form to
 * @throws IOException if any underlying write fails
 */
@Override
public void write(DataOutput out) throws IOException {
  out.writeLong(fakeAppId.getClusterTimestamp());
  out.writeInt(fakeAppId.getId());
  token.write(out);
  out.writeLong(creationTime);
  out.writeInt(vertexParallelism);
  // Length-prefixed optional payloads: 0 marks "not set".
  final byte[] spec = vertexSpec;
  if (spec == null) {
    out.writeInt(0);
  } else {
    out.writeInt(spec.length);
    out.write(spec);
  }
  final byte[] signature = vertexSpecSignature;
  if (signature == null) {
    out.writeInt(0);
  } else {
    out.writeInt(signature.length);
    out.write(signature);
  }
}
ContainerId containerId = ConverterUtils.toContainerId(envs .get(ApplicationConstants.Environment.CONTAINER_ID.name())); jstormMasterContext.appAttemptID = containerId.getApplicationAttemptId(); + jstormMasterContext.appAttemptID.getApplicationId().getId() + ", clustertimestamp=" + jstormMasterContext.appAttemptID.getApplicationId().getClusterTimestamp() + ", attemptId=" + jstormMasterContext.appAttemptID.getAttemptId()); conf.set(JOYConstants.INSTANCE_DEPLOY_DIR_KEY, envs.get(JOYConstants.BINARYFILEDEPLOYPATH)); jstormMasterContext.deployPath = envs.get(JOYConstants.BINARYFILEDEPLOYPATH); conf.set(JOYConstants.INSTANCE_NAME_KEY, envs.get(JOYConstants.INSTANCENAME)); jstormMasterContext.instanceName = envs.get(JOYConstants.INSTANCENAME);
assertFalse(targetFileSystem.exists(targetDir)); srcPath = new Path("file://" + srcDir.getAbsolutePath()); } else { srcPath = new Path(srcDir.getAbsolutePath()); HashMap<String, LocalResource> localResources = new HashMap<>(); AbstractYarnClusterDescriptor.uploadAndRegisterFiles( Collections.singletonList(new File(srcPath.toUri().getPath())), targetFileSystem, targetDir, ApplicationId.newInstance(0, 0), remotePaths, localResources, targetFileSystem.listFiles(workDir, true); HashMap<String /* (relative) path */, /* contents */ String> targetFiles = new HashMap<>(4); try (FSDataInputStream in = targetFileSystem.open(targetFile.getPath())) { String absolutePathString = targetFile.getPath().toString(); String relativePath = absolutePathString.substring(workDirPrefixLength);
FileSystem fs = FileSystem.get(jstormClientContext.conf); addToLocalResources(fs, jstormClientContext.appMasterJar, JOYConstants.appMasterJarPath, appId.toString(), localResources, null); addToLocalResources(fs, JOYConstants.CONF_NAME, JOYConstants.CONF_NAME, appId.toString(), localResources, null); } else { addToLocalResources(fs, jstormClientContext.confFile, JOYConstants.CONF_NAME, appId.toString(), localResources, null); String[] strArr = libPath.split(JOYConstants.BACKLASH); String libName = strArr[strArr.length - 1]; addToLocalResources(fs, libPath, libName, appId.toString(), localResources, null); addToLocalResources(fs, jstormClientContext.log4jPropFile, JOYConstants.log4jPath, appId.toString(), localResources, null); long hdfsShellScriptTimestamp = 0; if (!jstormClientContext.shellScriptPath.isEmpty()) { Path shellSrc = new Path(jstormClientContext.shellScriptPath); String shellPathSuffix = jstormClientContext.appName + JOYConstants.BACKLASH + appId.toString() + JOYConstants.BACKLASH + JOYConstants.SCRIPT_PATH; Path shellDst = new Path(fs.getHomeDirectory(), shellPathSuffix); fs.copyFromLocalFile(false, true, shellSrc, shellDst); hdfsShellScriptLocation = shellDst.toUri().toString(); addToLocalResources(fs, null, JOYConstants.shellCommandPath, appId.toString(),
throw new RuntimeException("Error cacheFile path format " + path); pathRemote = new Path(paths[0]); } else { pathRemote = new Path(path); if (!pathRemote.getFileSystem(conf).exists(pathRemote)) { throw new IOException("cacheFile path " + pathRemote + " not existed!"); pathRemote = new Path(path); if (!pathRemote.getFileSystem(conf).exists(pathRemote)) { throw new IOException("cacheArchive path " + pathRemote + " not existed!"); LOG.info("Got new Application: " + applicationId.toString()); FileSystem.create(jobConfPath.getFileSystem(conf), jobConfPath, new FsPermission(JOB_FILE_PERMISSION)); conf.writeXml(out);
Path renamedScriptPath; if (Shell.WINDOWS) { renamedScriptPath = new Path(jstormMasterContext.scriptPath + ".bat"); } else { renamedScriptPath = new Path(jstormMasterContext.scriptPath + ".sh"); try { yarnUrl = ConverterUtils.getYarnUrlFromURI( new URI(renamedScriptPath.toString())); } catch (URISyntaxException e) { LOG.error("Error when trying to use shell script path specified" jstormMasterContext.nimbusDataDirPrefix = conf.get(JOYConstants.INSTANCE_DATA_DIR_KEY); String localDir = jstormMasterContext.nimbusDataDirPrefix + container.getId().toString() + JOYConstants.BACKLASH + jstormMasterContext.instanceName; vargs.add(localDir); slotPortsView.setMinPort(conf.getInt(JOYConstants.SUPERVISOR_MIN_PORT_KEY, JOYConstants.PORT_RANGE_MIN)); slotPortsView.setMaxPort(conf.getInt(JOYConstants.SUPERVISOR_MAX_PORT_KEY, JOYConstants.PORT_RANGE_MAX)); String slotPortsStr = JOYConstants.EMPTY; try { deployDst = jstormMasterContext.nimbusDataDirPrefix; String dstPath = deployDst + container.getId().toString(); this.container.getId().toString(), localDir, jstormMasterContext.deployPath, hadoopHome, javaHome, pythonHome, dstPath, slotPortsStr, jstormMasterContext.shellArgs, envs.get(JOYConstants.CLASS_PATH), JOYConstants.ExecShellStringPath, jstormMasterContext.appAttemptID.getApplicationId().toString(), logviewPort, nimbusThriftPort);
final FileSystem fs = FileSystem.get(yarnConfiguration); final Path homeDir = fs.getHomeDirectory(); fs.getScheme().startsWith("file")) { fs, appId, new Path(tmpConfigurationFile.getAbsolutePath()), localResources, homeDir, File fp = File.createTempFile(appId.toString(), null); fp.deleteOnExit(); try (FileOutputStream output = new FileOutputStream(fp); fs, appId, new Path(fp.toURI()), localResources, homeDir, appMasterEnv.put(YarnConfigKeys.ENV_APP_ID, appId.toString()); throw new YarnDeploymentException("Failed to deploy the cluster.", e); YarnApplicationState appState = report.getYarnApplicationState(); LOG.debug("Application State: {}", appState); switch(appState) {
FileSystem fs = FileSystem.get(conf); createMaaSDirectory(fs, appId.toString()); Path ajPath = addToLocalResources(fs, appMasterJar, appMasterJarPath, appId.toString(), localResources, null); addToLocalResources(fs, log4jPropFile, log4jPath, appId.toString(), localResources, null); for (String c : conf.getStrings( YarnConfiguration.YARN_APPLICATION_CLASSPATH, YarnConfiguration.DEFAULT_YARN_CROSS_PLATFORM_APPLICATION_CLASSPATH)) { if (conf.getBoolean(YarnConfiguration.IS_MINI_YARN_CLUSTER, false)) { classPathEnv.append(':'); classPathEnv.append(System.getProperty("java.class.path")); ,ApplicationMaster.AMOptions.APP_JAR_PATH.of(ajPath.toString()) String tokenRenewer = conf.get(YarnConfiguration.RM_PRINCIPAL); if (tokenRenewer == null || tokenRenewer.length() == 0) { throw new IOException( fs.addDelegationTokens(tokenRenewer, credentials); if (tokens != null) { for (Token<?> token : tokens) { LOG.info("Got dt for " + fs.getUri() + "; " + token);
this.conf = conf; this.app = app; set(APP_ID, app.context.getApplicationID().toString()); if (System.getenv().containsKey(XLearningConstants.Environment.XLEARNING_APP_TYPE.toString())) { if ("xlearning".equals(System.getenv(XLearningConstants.Environment.XLEARNING_APP_TYPE.toString()).toLowerCase())) { if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE)) { if (boardUrl != null) { set(BOARD_INFO, boardUrl); set(WORKER_MEMORY, String.format("%.2f", app.context.getWorkerMemory() / 1024.0)); set(PS_MEMORY, String.format("%.2f", app.context.getPsMemory() / 1024.0)); set(USER_NAME, StringUtils.split(conf.get("hadoop.job.ugi"), ',')[0]); int i = 0; for (Container container : workerContainers) { set(CONTAINER_STATUS + i, "-"); if (conf.getBoolean(XLearningConfiguration.XLEARNING_TF_EVALUATOR, XLearningConfiguration.DEFAULT_XLEARNING_TF_EVALUATOR) && container.getId().toString().equals(app.context.getTfEvaluatorId())) { set(CONTAINER_ROLE + i, XLearningConstants.EVALUATOR); } else { Path interResult = new Path(output.getDfsLocation() + conf.get(XLearningConfiguration.XLEARNING_INTERREAULST_DIR, XLearningConfiguration.DEFAULT_XLEARNING_INTERRESULT_DIR)); set(OUTPUT_PATH + i, interResult.toString());
/**
 * Resolves the fully-qualified remote staging path for a file belonging to
 * the given application: {@code <stagingDir>/<appId>/<fileName>}, qualified
 * against the configured default filesystem.
 *
 * @param conf     configuration providing the staging dir and fs.defaultFS
 * @param appId    application owning the file
 * @param fileName file name relative to the application's staging dir
 * @return the absolute remote path
 */
public static Path getRemotePath(XLearningConfiguration conf, ApplicationId appId, String fileName) {
  String stagingRoot = conf.get(XLearningConfiguration.XLEARNING_STAGING_DIR,
      XLearningConfiguration.DEFAULT_XLEARNING_STAGING_DIR);
  Path relativePath = new Path(stagingRoot, appId.toString() + "/" + fileName);
  // Anchor the staging-relative path on the default filesystem so callers
  // get an absolute, scheme-qualified location.
  Path qualifiedPath = new Path(conf.get("fs.defaultFS"), relativePath);
  LOG.debug("Got remote path of " + fileName + " is " + qualifiedPath.toString());
  return qualifiedPath;
}
/**
 * Registers container-level local resources — application jars and local
 * files — under the container working directory. Each category is added
 * only when its config key is present.
 *
 * @param applicationId YARN application whose work directory is used
 * @throws IOException if adding any resource fails
 */
private void addContainerLocalResources(ApplicationId applicationId) throws IOException {
  final Path appWorkDir = GobblinClusterUtils.getAppWorkDirPath(
      this.fs, this.applicationName, applicationId.toString());
  final Path containerWorkDir =
      new Path(appWorkDir, GobblinYarnConfigurationKeys.CONTAINER_WORK_DIR_NAME);
  // Jars and local files are independent; each is optional.
  if (this.config.hasPath(GobblinYarnConfigurationKeys.CONTAINER_JARS_KEY)) {
    addAppJars(this.config.getString(GobblinYarnConfigurationKeys.CONTAINER_JARS_KEY),
        Optional.<Map<String, LocalResource>>absent(),
        new Path(containerWorkDir, GobblinYarnConfigurationKeys.APP_JARS_DIR_NAME));
  }
  if (this.config.hasPath(GobblinYarnConfigurationKeys.CONTAINER_FILES_LOCAL_KEY)) {
    addAppLocalFiles(this.config.getString(GobblinYarnConfigurationKeys.CONTAINER_FILES_LOCAL_KEY),
        Optional.<Map<String, LocalResource>>absent(),
        new Path(containerWorkDir, GobblinYarnConfigurationKeys.APP_FILES_DIR_NAME));
  }
}
/**
 * Returns the qualified working directory used to store jars, files and
 * other resources for the given application:
 * {@code /<GUAGUA_YARN_TMP>/<GUAGUA_HDFS_DIR>/<appId>}.
 *
 * @param fs    filesystem used to qualify the path
 * @param appId application whose directory is requested
 * @return the fully-qualified application work directory
 */
public static Path getAppDirectory(FileSystem fs, ApplicationId appId) {
  Path tmpRoot = new Path(File.separator + GUAGUA_YARN_TMP, GuaguaYarnConstants.GUAGUA_HDFS_DIR);
  Path appDir = new Path(tmpRoot, appId.toString());
  return fs.makeQualified(appDir);
}
private Path getDonePath(ApplicationId appId) { // cut up the app ID into mod(1000) buckets int appNum = appId.getId(); appNum /= 1000; int bucket2 = appNum % 1000; int bucket1 = appNum / 1000; return new Path(doneRootPath, String.format(APP_DONE_DIR_FORMAT, appId.getClusterTimestamp(), bucket1, bucket2, appId.toString())); }
private HistoryFileReader getHistoryFileReader(ApplicationId appId) throws IOException { Path applicationHistoryFile = new Path(rootDirPath, appId.toString()); if (!fs.exists(applicationHistoryFile)) { throw new IOException("History file for application " + appId + " is not found"); } // The history file is still under writing if (outstandingWriters.containsKey(appId)) { throw new IOException("History file for application " + appId + " is under writing"); } return new HistoryFileReader(applicationHistoryFile); }
/**
 * Ensures the per-application log directory exists under the current user's
 * app root dir, creating it with {@code APP_LOG_DIR_PERMISSIONS} if needed.
 *
 * @param appId application the directory belongs to
 * @return the application directory path (created or pre-existing)
 * @throws IOException if the directory cannot be created
 */
private Path createApplicationDir(ApplicationId appId) throws IOException {
  final Path appDir = new Path(getAppRootDir(authUgi.getShortUserName()), appId.toString());
  final boolean created =
      FileSystem.mkdirs(fs, appDir, new FsPermission(APP_LOG_DIR_PERMISSIONS));
  // mkdirs reports whether a new directory was actually made.
  if (created && LOG.isDebugEnabled()) {
    LOG.debug("New app directory created - " + appDir);
  }
  return appDir;
}
/**
 * Recursively deletes the application's working directory if it exists;
 * does nothing otherwise.
 *
 * @param applicationId application whose work directory should be removed
 * @throws IOException if the existence check or the delete fails
 */
@VisibleForTesting
void cleanUpAppWorkDirectory(ApplicationId applicationId) throws IOException {
  Path workDir = GobblinClusterUtils.getAppWorkDirPath(
      this.fs, this.applicationName, applicationId.toString());
  if (!this.fs.exists(workDir)) {
    return;
  }
  LOGGER.info("Deleting application working directory " + workDir);
  this.fs.delete(workDir, true);
}