/**
 * Prints a human-readable summary of a batch build (submit time, state,
 * launched Hadoop jobs and the CLI arguments used) to the given stream.
 */
private void printBatchBuildInfo(OptionSet options, PrintStream ps, BatchBuildInfo batchBuildInfo) throws Exception {
    ps.println(" + Submitted at: " + new DateTime(batchBuildInfo.getSubmitTime()).toString());

    // Tri-state flag: null = still pending, true = success, false = failure.
    Boolean finishedSuccessful = batchBuildInfo.isFinishedSuccessful();
    String state;
    if (finishedSuccessful == null) {
        state = "pending...";
    } else if (finishedSuccessful) {
        state = "SUCCESS";
    } else {
        state = "FAILED";
    }
    ps.println(" + State: " + state);

    Map<String, String> trackingUrls = batchBuildInfo.getMapReduceJobTrackingUrls();
    ps.println(" + Hadoop jobs: " + (trackingUrls.isEmpty() ? "(none)" : ""));
    for (Map.Entry<String, String> jobEntry : trackingUrls.entrySet()) {
        ps.println(" + Hadoop Job ID: " + jobEntry.getKey());
        ps.println(" + Tracking URL: " + jobEntry.getValue());
    }

    ps.println(" + Batch build CLI arguments:");
    printArguments(batchBuildInfo.getBatchIndexCliArguments(), 8, ps, options.has("dump"));
}
/**
 * Returns a copy of this BatchBuildInfo with the finished-successful flag set
 * to the given value. This instance itself is left unmodified (copy-on-write).
 */
public BatchBuildInfo finishedSuccessfully(boolean finishedSuccessful) {
    BatchBuildInfo copy = new BatchBuildInfo(this);
    copy.finishedSuccessful = finishedSuccessful;
    return copy;
}
// Guard: an indexer may have no active batch build; only collect the Hadoop
// job ids when one is present.
BatchBuildInfo batchBuildInfo = indexerDefinition.getActiveBatchBuildInfo(); if (batchBuildInfo != null) { Set<String> jobs = batchBuildInfo.getMapReduceJobTrackingUrls().keySet();
@Override public void jobStarted(String jobId, String trackingUrl) { try { // Lock internal bypasses the index-in-delete-state check, which does not matter (and might cause // failure) in our case. String lock = indexerModel.lockIndexerInternal(indexerName, false); try { IndexerDefinition definition = indexerModel.getFreshIndexer(indexerName); BatchBuildInfo batchBuildInfo = new BatchBuildInfo(definition.getActiveBatchBuildInfo()).withJob(jobId, trackingUrl); IndexerDefinition updatedDefinition = new IndexerDefinitionBuilder().startFrom(definition) .activeBatchBuildInfo(batchBuildInfo) .build(); indexerModel.updateIndexerInternal(updatedDefinition); log.info("Updated indexer batch build state for indexer " + indexerName); } finally { indexerModel.unlockIndexer(lock, true); } } catch (Exception e) { log.error("failed to update indexer batch build state for indexer " + indexerName); } } }
// Copy the active build info and mark it finished; finishedSuccessfully(...)
// returns a new instance (copy-on-write), hence the reassignment.
BatchBuildInfo batchBuildInfo = new BatchBuildInfo(activeJobInfo); batchBuildInfo = batchBuildInfo.finishedSuccessfully(success);
// Look up each Hadoop job launched for the active batch build.
// Fix: job ids are stored as strings; parse them with JobID.forName instead of
// calling the deprecated JobClient.getJob(String) overload — consistent with the
// other call site in this codebase.
Set<String> jobs = indexer.getActiveBatchBuildInfo().getMapReduceJobTrackingUrls().keySet(); for (String jobId : jobs) { RunningJob job = jobClient.getJob(JobID.forName(jobId));
@Override public void jobStarted(String jobId, String trackingUrl) { try { // Lock internal bypasses the index-in-delete-state check, which does not matter (and might cause // failure) in our case. String lock = indexerModel.lockIndexerInternal(indexerName, false); try { IndexerDefinition definition = indexerModel.getFreshIndexer(indexerName); BatchBuildInfo batchBuildInfo = new BatchBuildInfo(definition.getActiveBatchBuildInfo()).withJob(jobId, trackingUrl); IndexerDefinition updatedDefinition = new IndexerDefinitionBuilder().startFrom(definition) .activeBatchBuildInfo(batchBuildInfo) .build(); indexerModel.updateIndexerInternal(updatedDefinition); log.info("Updated indexer batch build state for indexer " + indexerName); } finally { indexerModel.unlockIndexer(lock, true); } } catch (Exception e) { log.error("failed to update indexer batch build state for indexer " + indexerName); } } }
// Copy the active build info and mark it finished; finishedSuccessfully(...)
// returns a new instance (copy-on-write), hence the reassignment.
BatchBuildInfo batchBuildInfo = new BatchBuildInfo(activeJobInfo); batchBuildInfo = batchBuildInfo.finishedSuccessfully(success);
/**
 * Prints a human-readable summary of a batch build (submit time, state,
 * launched Hadoop jobs and the CLI arguments used) to the given stream.
 */
private void printBatchBuildInfo(OptionSet options, PrintStream ps, BatchBuildInfo batchBuildInfo) throws Exception {
    ps.println(" + Submitted at: " + new DateTime(batchBuildInfo.getSubmitTime()).toString());

    // Tri-state flag: null = still pending, true = success, false = failure.
    Boolean finishedSuccessful = batchBuildInfo.isFinishedSuccessful();
    String state;
    if (finishedSuccessful == null) {
        state = "pending...";
    } else if (finishedSuccessful) {
        state = "SUCCESS";
    } else {
        state = "FAILED";
    }
    ps.println(" + State: " + state);

    Map<String, String> trackingUrls = batchBuildInfo.getMapReduceJobTrackingUrls();
    ps.println(" + Hadoop jobs: " + (trackingUrls.isEmpty() ? "(none)" : ""));
    for (Map.Entry<String, String> jobEntry : trackingUrls.entrySet()) {
        ps.println(" + Hadoop Job ID: " + jobEntry.getKey());
        ps.println(" + Tracking URL: " + jobEntry.getValue());
    }

    ps.println(" + Batch build CLI arguments:");
    printArguments(batchBuildInfo.getBatchIndexCliArguments(), 8, ps, options.has("dump"));
}
/**
 * Returns a copy of this BatchBuildInfo with the finished-successful flag set
 * to the given value. This instance itself is left unmodified (copy-on-write).
 */
public BatchBuildInfo finishedSuccessfully(boolean finishedSuccessful) {
    BatchBuildInfo copy = new BatchBuildInfo(this);
    copy.finishedSuccessful = finishedSuccessful;
    return copy;
}
// Look up each Hadoop job launched for the active batch build; job ids are
// stored as strings, so JobID.forName parses them back for the job client.
Set<String> jobs = indexer.getActiveBatchBuildInfo().getMapReduceJobTrackingUrls().keySet(); for (String jobId : jobs) { RunningJob job = jobClient.getJob(JobID.forName(jobId));
/**
 * Serializes a BatchBuildInfo into the given JSON object node: submit time,
 * tri-state finished flag, job-id-to-tracking-URL map and the CLI arguments.
 */
private void setBatchBuildInfo(BatchBuildInfo buildInfo, ObjectNode batchNode) {
    batchNode.put("submitTime", buildInfo.getSubmitTime());
    Boolean isFinishedSuccessful = buildInfo.isFinishedSuccessful();
    if (isFinishedSuccessful == null) {
        // null means the build is still pending; write an explicit JSON null.
        // Idiom fix: putNull instead of put(name, batchNode.nullNode()).
        batchNode.putNull("finishedSuccessful");
    } else {
        batchNode.put("finishedSuccessful", isFinishedSuccessful);
    }
    ObjectNode jobs = batchNode.putObject("mapReduceJobTrackingUrls");
    for (Map.Entry<String, String> entry : buildInfo.getMapReduceJobTrackingUrls().entrySet()) {
        jobs.put(entry.getKey(), entry.getValue());
    }
    setStringArrayProperty(batchNode, "batchIndexCliArguments", buildInfo.getBatchIndexCliArguments());
}
}
/**
 * Returns a copy of this BatchBuildInfo with the given Hadoop job
 * (id -> tracking URL) added. This instance itself is left unmodified.
 */
public BatchBuildInfo withJob(String jobId, String jobTrackingUrl) {
    BatchBuildInfo copy = new BatchBuildInfo(this);
    Map<String, String> urls = Maps.newHashMap(mapReduceJobTrackingUrls);
    urls.put(jobId, jobTrackingUrl);
    copy.mapReduceJobTrackingUrls = urls;
    return copy;
}
/**
 * Serializes a BatchBuildInfo into the given JSON object node: submit time,
 * tri-state finished flag, job-id-to-tracking-URL map and the CLI arguments.
 */
private void setBatchBuildInfo(BatchBuildInfo buildInfo, ObjectNode batchNode) {
    batchNode.put("submitTime", buildInfo.getSubmitTime());
    Boolean isFinishedSuccessful = buildInfo.isFinishedSuccessful();
    if (isFinishedSuccessful == null) {
        // null means the build is still pending; write an explicit JSON null.
        // Idiom fix: putNull instead of put(name, batchNode.nullNode()).
        batchNode.putNull("finishedSuccessful");
    } else {
        batchNode.put("finishedSuccessful", isFinishedSuccessful);
    }
    ObjectNode jobs = batchNode.putObject("mapReduceJobTrackingUrls");
    for (Map.Entry<String, String> entry : buildInfo.getMapReduceJobTrackingUrls().entrySet()) {
        jobs.put(entry.getKey(), entry.getValue());
    }
    setStringArrayProperty(batchNode, "batchIndexCliArguments", buildInfo.getBatchIndexCliArguments());
}
}
/**
 * Returns a copy of this BatchBuildInfo with the given Hadoop job
 * (id -> tracking URL) added. This instance itself is left unmodified.
 */
public BatchBuildInfo withJob(String jobId, String jobTrackingUrl) {
    BatchBuildInfo copy = new BatchBuildInfo(this);
    Map<String, String> urls = Maps.newHashMap(mapReduceJobTrackingUrls);
    urls.put(jobId, jobTrackingUrl);
    copy.mapReduceJobTrackingUrls = urls;
    return copy;
}
/**
 * Deserializes a BatchBuildInfo from the given JSON object node, the inverse
 * of the serialization format (submitTime, finishedSuccessful, tracking URLs,
 * CLI arguments).
 */
private BatchBuildInfo parseBatchBuildInfo(ObjectNode buildNode) {
    ObjectNode urlsNode = JsonUtil.getObject(buildNode, "mapReduceJobTrackingUrls");
    Map<String, String> trackingUrls = new HashMap<String, String>();
    for (Iterator<String> fieldNames = urlsNode.getFieldNames(); fieldNames.hasNext(); ) {
        String jobId = fieldNames.next();
        trackingUrls.put(jobId, JsonUtil.getString(urlsNode, jobId));
    }
    return new BatchBuildInfo(
            JsonUtil.getLong(buildNode, "submitTime"),
            JsonUtil.getBoolean(buildNode, "finishedSuccessful"),
            trackingUrls,
            getStringArrayProperty(buildNode, "batchIndexCliArguments"));
}
/**
 * Deserializes a BatchBuildInfo from the given JSON object node, the inverse
 * of the serialization format (submitTime, finishedSuccessful, tracking URLs,
 * CLI arguments).
 */
private BatchBuildInfo parseBatchBuildInfo(ObjectNode buildNode) {
    ObjectNode urlsNode = JsonUtil.getObject(buildNode, "mapReduceJobTrackingUrls");
    Map<String, String> trackingUrls = new HashMap<String, String>();
    for (Iterator<String> fieldNames = urlsNode.getFieldNames(); fieldNames.hasNext(); ) {
        String jobId = fieldNames.next();
        trackingUrls.put(jobId, JsonUtil.getString(urlsNode, jobId));
    }
    return new BatchBuildInfo(
            JsonUtil.getLong(buildNode, "submitTime"),
            JsonUtil.getBoolean(buildNode, "finishedSuccessful"),
            trackingUrls,
            getStringArrayProperty(buildNode, "batchIndexCliArguments"));
}
// Start a new batch build: record the submit time now; the finished flag and
// the job-tracking map are still unknown, hence the null arguments.
BatchBuildInfo jobInfo = new BatchBuildInfo(System.currentTimeMillis(), null, null, batchArguments); updatedIndexer .activeBatchBuildInfo(jobInfo)
// Start a new batch build: record the submit time now; the finished flag and
// the job-tracking map are still unknown, hence the null arguments.
BatchBuildInfo jobInfo = new BatchBuildInfo(System.currentTimeMillis(), null, null, batchArguments); updatedIndexer .activeBatchBuildInfo(jobInfo)