/**
 * Checks whether the resource still knows the job identified by {@code jobID}.
 *
 * @param cluster cluster connection used for the status query
 * @param jobID   id of the job to look up
 * @return {@code true} when the resource reports a status other than UNKNOWN
 * @throws SSHApiException when the remote status query fails
 */
private boolean verifyJobSubmissionByJobId(Cluster cluster, String jobID) throws SSHApiException {
    JobStatus currentStatus = cluster.getJobStatus(jobID);
    if (currentStatus == null) {
        return false;
    }
    return currentStatus != JobStatus.U;
}
/**
 * Sets the status map for this instance.
 *
 * <p>NOTE(review): the previous implementation called {@code this.setStatus(status)},
 * i.e. itself, which recursed infinitely and threw {@link StackOverflowError} on every
 * invocation. It now assigns the backing field directly — assumed to be named
 * {@code status}; confirm against the enclosing class declaration.
 *
 * @param status map of status entries keyed by name
 */
public void setStatus(HashMap<String, String> status) { this.status = status; }
/**
 * Looks up the resource-assigned job id for the job described by {@code jobDetails},
 * querying the cluster with the job's name and the connected user name.
 *
 * @param cluster    cluster connection used for the lookup
 * @param jobDetails job details holding the job name to search for
 * @return the job id reported by the resource, or {@code null} when the lookup fails
 */
private String verifyJobSubmission(Cluster cluster, JobDetails jobDetails) {
    String jobName = jobDetails.getJobName();
    String jobId = null;
    try {
        jobId = cluster.getJobIdByJobName(jobName, cluster.getServerInfo().getUserName());
    } catch (SSHApiException e) {
        // Pass the exception to the logger so the cause/stack trace is not silently dropped.
        log.error("Error while verifying JobId from JobName", e);
    }
    return jobId;
}
/**
 * Runs the monitor command for a single job and parses the full job description
 * from its output.
 *
 * @param jobID id of the job to describe
 * @return the parsed job descriptor
 * @throws SSHApiException when the remote command fails or reports an error
 */
public synchronized JobDescriptor getJobDescriptorById(String jobID) throws SSHApiException {
    RawCommandInfo monitorCommand = jobManagerConfiguration.getMonitorCommand(jobID);
    StandardOutReader outputReader = new StandardOutReader();
    log.info("Executing RawCommand : " + monitorCommand.getCommand());
    CommandExecutor.executeCommand(monitorCommand, this.getSession(), outputReader);
    String rawOutput = getOutputifAvailable(outputReader,
            "Error getting job information from the resource !",
            jobManagerConfiguration.getBaseMonitorCommand());
    JobDescriptor descriptor = new JobDescriptor();
    jobManagerConfiguration.getParser().parseSingleJob(descriptor, rawOutput);
    return descriptor;
}
/**
 * Runs the user-based monitor command and fills {@code jobIDs} with the parsed
 * statuses. Retries the remote command up to three times, sleeping and
 * reconnecting between attempts.
 *
 * @param userName remote user whose jobs are queried
 * @param jobIDs   map of job ids to be populated with parsed statuses (in/out)
 * @throws SSHApiException when the command still fails after all retries
 * @deprecated superseded by per-job monitoring
 */
@Deprecated
public synchronized void getJobStatuses(String userName, Map<String, JobStatus> jobIDs) throws SSHApiException {
    int retry = 3;
    RawCommandInfo rawCommandInfo = jobManagerConfiguration.getUserBasedMonitorCommand(userName);
    StandardOutReader stdOutReader = new StandardOutReader();
    while (retry > 0) {
        try {
            log.info("Executing RawCommand : " + rawCommandInfo.getCommand());
            CommandExecutor.executeCommand(rawCommandInfo, this.getSession(), stdOutReader);
            retry = 0; // success — leave the retry loop
        } catch (SSHApiException e) {
            retry--;
            try {
                Thread.sleep(5000);
            } catch (InterruptedException e1) {
                // Restore the interrupt flag so callers can still observe the interruption.
                Thread.currentThread().interrupt();
                log.error(e1.getMessage(), e1);
            }
            reconnect(serverInfo, authenticationInfo);
            if (retry == 0) {
                throw new SSHApiException("Failed Getting statuses to remote file", e);
            }
        }
    }
    String result = getOutputifAvailable(stdOutReader,
            "Error getting job information from the resource !",
            jobManagerConfiguration.getBaseMonitorCommand());
    jobManagerConfiguration.getParser().parseJobStatuses(userName, jobIDs, result);
}
/**
 * Runs the monitor command for the given job id and parses its status from the
 * command output.
 *
 * @param jobID id of the job whose status is requested
 * @return the parsed job status
 * @throws SSHApiException when the remote command fails or reports an error
 */
public synchronized JobStatus getJobStatus(String jobID) throws SSHApiException {
    RawCommandInfo monitorCommand = jobManagerConfiguration.getMonitorCommand(jobID);
    StandardOutReader outputReader = new StandardOutReader();
    log.info("Executing RawCommand : " + monitorCommand.getCommand());
    CommandExecutor.executeCommand(monitorCommand, this.getSession(), outputReader);
    String rawOutput = getOutputifAvailable(outputReader,
            "Error getting job information from the resource !",
            jobManagerConfiguration.getBaseMonitorCommand());
    return jobManagerConfiguration.getParser().parseJobStatus(jobID, rawOutput);
}
/**
 * Creates a monitor id for the given execution context and, when authentication
 * information is present, extracts the remote user name from whichever security
 * context (GSI or plain SSH) is registered for the execution host.
 *
 * @param authenticationInfo  credentials used to reach the resource (may be null)
 * @param jobExecutionContext execution context the monitor id is derived from
 */
public HPCMonitorID(AuthenticationInfo authenticationInfo, JobExecutionContext jobExecutionContext) {
    super(jobExecutionContext);
    this.authenticationInfo = authenticationInfo;
    if (this.authenticationInfo == null) {
        return;
    }
    try {
        String hostAddress = jobExecutionContext.getHostName();
        SecurityContext securityContext = jobExecutionContext.getSecurityContext(hostAddress);
        if (securityContext != null) {
            if (securityContext instanceof GSISecurityContext) {
                setUserNameFromServerInfo(((GSISecurityContext) securityContext).getPbsCluster().getServerInfo());
            }
            if (securityContext instanceof SSHSecurityContext) {
                setUserNameFromServerInfo(((SSHSecurityContext) securityContext).getPbsCluster().getServerInfo());
            }
        }
    } catch (GFacException e) {
        logger.error("Error while getting security context", e);
    }
}

/** Copies the remote user name onto this monitor id when the server info carries one. */
private void setUserNameFromServerInfo(ServerInfo serverInfo) {
    if (serverInfo.getUserName() != null) {
        setUserName(serverInfo.getUserName());
    }
}
public synchronized String submitBatchJobWithScript(String scriptPath, String workingDirectory) throws SSHApiException { this.scpTo(workingDirectory, scriptPath); // since this is a constant we do not ask users to fill this // RawCommandInfo rawCommandInfo = new RawCommandInfo(this.installedPath + this.jobManagerConfiguration.getSubmitCommand() + " " + // workingDirectory + File.separator + FilenameUtils.getName(scriptPath)); RawCommandInfo rawCommandInfo = jobManagerConfiguration.getSubmitCommand(workingDirectory,scriptPath); StandardOutReader standardOutReader = new StandardOutReader(); log.info("Executing RawCommand : " + rawCommandInfo.getCommand()); CommandExecutor.executeCommand(rawCommandInfo, this.session, standardOutReader); //Check whether pbs submission is successful or not, if it failed throw and exception in submitJob method // with the error thrown in qsub command // String outputifAvailable = getOutputifAvailable(standardOutReader,"Error reading output of job submission",jobManagerConfiguration.getBaseSubmitCommand()); log.info("Job Submission SSH Command Output: " + outputifAvailable); OutputParser outputParser = jobManagerConfiguration.getParser(); return outputParser.parseJobSubmission(outputifAvailable); }
// NOTE(review): garbled fragment of an scpTo-style retry method — the try/catch
// scaffolding is missing, the same throw statement appears twice in a row, and the
// braces do not balance. Reconcile against the original method before editing.
session.connect(); log.info("Transfering file:/" + localFile + " To:" + serverInfo.getHost() + ":" + remoteFile); SSHUtils.scpTo(remoteFile, localFile, session); retry = 0; reconnect(serverInfo, authenticationInfo); if (retry == 0) { throw new SSHApiException("Failed during scping local file:" + localFile + " to remote file " + serverInfo.getHost() + ":rFile : " + remoteFile, e); throw new SSHApiException("Failed during scping local file:" + localFile + " to remote file " + serverInfo.getHost() + ":rFile : " + remoteFile, e);
// NOTE(review): garbled fragment of CommandExecutor.executeCommand — statements are
// out of order (throws appear before the channel is connected / output consumed) and
// the surrounding method signature and braces are incomplete. Do not edit in place;
// restore from the original source.
CommandOutput commandOutput) throws SSHApiException { String command = commandInfo.getCommand(); throw new SSHApiException("Unable to execute command - ", e); ((ChannelExec) channel).setErrStream(commandOutput.getStandardError()); try { channel.connect(); throw new SSHApiException("Unable to retrieve command output. Command - " + command, e); commandOutput.onOutput(channel);
/**
 * Resolves the resource-assigned job id for a job submitted under the given name.
 *
 * @param jobName  name the job was submitted under
 * @param userName remote user that owns the job
 * @return the job id parsed from the monitor command output
 * @throws SSHApiException when the lookup command fails or reports an error
 */
@Override
public String getJobIdByJobName(String jobName, String userName) throws SSHApiException {
    RawCommandInfo rawCommandInfo = jobManagerConfiguration.getJobIdMonitorCommand(jobName, userName);
    StandardOutReader stdOutReader = new StandardOutReader();
    log.info("Executing RawCommand : " + rawCommandInfo.getCommand());
    CommandExecutor.executeCommand(rawCommandInfo, this.getSession(), stdOutReader);
    // Reuse the command string already built above instead of constructing the
    // identical RawCommandInfo a second time.
    String result = getOutputifAvailable(stdOutReader,
            "Error getting job information from the resource !",
            rawCommandInfo.getCommand());
    return jobManagerConfiguration.getParser().parseJobId(jobName, result);
}
public synchronized JobDescriptor cancelJob(String jobID) throws SSHApiException { JobStatus jobStatus = getJobStatus(jobID); if (jobStatus == null || jobStatus == JobStatus.U) { log.info("Validation before cancel is failed, couldn't found job in remote host to cancel. Job may be already completed|failed|canceled"); return null; } RawCommandInfo rawCommandInfo = jobManagerConfiguration.getCancelCommand(jobID); StandardOutReader stdOutReader = new StandardOutReader(); log.info("Executing RawCommand : " + rawCommandInfo.getCommand()); CommandExecutor.executeCommand(rawCommandInfo, this.getSession(), stdOutReader); String outputifAvailable = getOutputifAvailable(stdOutReader, "Error reading output of job submission", jobManagerConfiguration.getBaseCancelCommand()); // this might not be the case for all teh resources, if so Cluster implementation can override this method // because here after cancelling we try to get the job description and return it back try { return this.getJobDescriptorById(jobID); } catch (Exception e) { //its ok to fail to get status when the job is gone return null; } }
public Map<String, JobState> getJobStatuses(List<MonitorID> monitorIDs) throws SSHApiException { Map<String, JobStatus> treeMap = new TreeMap<String, JobStatus>(); Map<String, JobState> treeMap1 = new TreeMap<String, JobState>(); // creating a sorted map with all the jobIds and with the predefined // status as UNKNOWN for (MonitorID monitorID : monitorIDs) { treeMap.put(monitorID.getJobID()+","+monitorID.getJobName(), JobStatus.U); } String userName = cluster.getServerInfo().getUserName(); //todo so currently we execute the qstat for each job but we can use user based monitoring //todo or we should concatenate all the commands and execute them in one go and parseSingleJob the response // cluster.getJobStatuses(userName, treeMap); for (String key : treeMap.keySet()) { treeMap1.put(key, getStatusFromString(treeMap.get(key).toString())); } return treeMap1; }
/**
 * Performs a third-party (remote-to-remote) copy between two locations reachable
 * from the connected session, reconnecting first if the session has dropped.
 *
 * @param remoteFileSource source file specification
 * @param remoteFileTarget target file specification
 * @throws SSHApiException when the transfer fails
 */
public synchronized void scpThirdParty(String remoteFileSource, String remoteFileTarget) throws SSHApiException {
    try {
        if (!session.isConnected()) {
            session.connect();
        }
        log.info("Transfering from:" + remoteFileSource + " To: " + remoteFileTarget);
        SSHUtils.scpThirdParty(remoteFileSource, remoteFileTarget, session);
    } catch (IOException | JSchException e) {
        // Both failure modes carry identical context, so a single multi-catch
        // replaces the two duplicated catch blocks.
        throw new SSHApiException("Failed during scping file:" + remoteFileSource
                + " to remote file " + remoteFileTarget, e);
    }
}
/**
 * Copies a remote file into the local parent directory and returns the staged
 * copy's location as a {@code file://} URI string.
 *
 * @param cluster    cluster connection used for the copy
 * @param paramValue remote path of the file to stage in
 * @param parentPath local directory the file is copied into
 * @return "file://" URI of the staged local copy
 * @throws GFacException when the SCP transfer fails
 */
private String stageInputFiles(Cluster cluster, String paramValue, String parentPath) throws GFacException {
    try {
        cluster.scpFrom(paramValue, parentPath);
        return "file://" + parentPath + File.separator + (new File(paramValue)).getName();
    } catch (SSHApiException e) {
        // Log the cause too so the transfer failure is diagnosable from the log alone.
        log.error("Error tranfering remote file to local file, remote path: " + paramValue, e);
        throw new GFacException(e);
    }
}
} // closes the enclosing class (this brace was part of the original line)
// NOTE(review): garbled fragment of a makeDirectory retry method — the enclosing
// try/while scaffolding is missing and the final throw is duplicated, so the braces
// do not balance. Reconcile against the original method before editing.
session.connect(); log.info("Creating directory: " + serverInfo.getHost() + ":" + directoryPath); SSHUtils.makeDirectory(directoryPath, session); retry = 0; } catch (IOException e) { throw new SSHApiException("Failed during creating directory:" + directoryPath + " to remote file " + serverInfo.getHost() + ":rFile", e); } catch (JSchException e) { retry--; throw new SSHApiException("Failed during creating directory :" + directoryPath + " to remote file " + serverInfo.getHost() + ":rFile", e); throw new SSHApiException("Failed during creating directory :" + directoryPath + " to remote file " + serverInfo.getHost() + ":rFile", e);
/** * This method will read standard output and if there's any it will be parsed * * @param jobIDReaderCommandOutput * @param errorMsg * @return * @throws SSHApiException */ private String getOutputifAvailable(StandardOutReader jobIDReaderCommandOutput, String errorMsg, String command) throws SSHApiException { String stdOutputString = jobIDReaderCommandOutput.getStdOutputString(); String stdErrorString = jobIDReaderCommandOutput.getStdErrorString(); log.info("StandardOutput Returned:" + stdOutputString); log.info("StandardError Returned:" +stdErrorString); String[] list = command.split(File.separator); command = list[list.length - 1]; // We are checking for stderr containing the command issued. Thus ignores the verbose logs in stderr. if (stdErrorString != null && stdErrorString.contains(command.trim()) && !stdErrorString.contains("Warning")) { log.error("Standard Error output : " + stdErrorString); throw new SSHApiException(errorMsg + "\n\r StandardOutput: "+ stdOutputString + "\n\r StandardError: "+ stdErrorString); }else if(stdOutputString.contains("error")){ throw new SSHApiException(errorMsg + "\n\r StandardOutput: "+ stdOutputString + "\n\r StandardError: "+ stdErrorString); } return stdOutputString; }
// NOTE(review): garbled fragment of an scpFrom retry method — the try/catch/else
// scaffolding is missing and the throw/else-retry pair appears twice, so the braces
// do not balance. Reconcile against the original method before editing.
session.connect(); log.info("Transfering from:" + serverInfo.getHost() + ":" + remoteFile + " To:" + "file:/" + localFile); SSHUtils.scpFrom(remoteFile, localFile, session); retry=0; throw new SSHApiException("Failed during scping local file:" + localFile + " to remote file " + serverInfo.getHost() + ":rFile", e); }else{ log.error("Error performing scp but doing a retry"); throw new SSHApiException("Failed during scping local file:" + localFile + " to remote file " + serverInfo.getHost() + ":rFile", e); }else{ log.error("Error performing scp but doing a retry");
/**
 * Returns the captured standard output, failing when the command produced no
 * output or wrote anything to standard error.
 *
 * @param jobIDReaderCommandOutput reader holding the captured command output
 * @param errorMsg                 prefix for the exception message on failure
 * @return the non-empty standard output of the command
 * @throws SSHApiException when stdout is null/empty or stderr is non-empty
 */
private String getOutputifAvailable(StandardOutReader jobIDReaderCommandOutput, String errorMsg) throws SSHApiException {
    String stdOut = jobIDReaderCommandOutput.getStdOutputString();
    String stdErr = jobIDReaderCommandOutput.getStdErrorString();
    boolean noOutput = stdOut == null || stdOut.isEmpty();
    boolean hasErrorOutput = stdErr != null && !stdErr.isEmpty();
    if (noOutput || hasErrorOutput) {
        log.error("Standard Error output : " + stdErr);
        throw new SSHApiException(errorMsg + stdErr);
    }
    return stdOut;
}
// NOTE(review): garbled fragment of a listDirectory retry method — the try/catch
// scaffolding is missing and three variants of the failure throw appear in sequence,
// so the braces do not balance. Reconcile against the original method before editing.
session.connect(); log.info("Listing directory: " + serverInfo.getHost() + ":" + directoryPath); files = SSHUtils.listDirectory(directoryPath, session, false); retry=0; throw new SSHApiException("Failed during listing directory:" + directoryPath + " to remote file ", e); reconnect(serverInfo, authenticationInfo); if (retry == 0) { throw new SSHApiException("Failed during listing directory :" + directoryPath + " to remote file ", e); throw new SSHApiException("Failed during listing directory :" + directoryPath + " to remote file " + serverInfo.getHost() + ":rFile", e);