/**
 * Handles one Kafka message: each parsed entry is either a {@link JobSpec} to add
 * to the catalog (left side) or a spec {@link URI} to remove (right side).
 * A message that fails to parse is logged with its offset and payload, then skipped.
 */
@Override protected void processMessage(MessageAndMetadata<byte[], byte[]> message) {
  try {
    for (Either<JobSpec, URI> entry : parseJobSpec(message.message())) {
      if (entry instanceof Either.Left) {
        // Addition/update: bump the counter and push the spec into the catalog.
        this.newSpecs.inc();
        this.jobCatalog.put(((Either.Left<JobSpec, URI>) entry).getLeft());
      } else if (entry instanceof Either.Right) {
        // Removal: bump the counter and delete the spec by URI.
        this.removedSpecs.inc();
        this.jobCatalog.remove(((Either.Right<JobSpec, URI>) entry).getRight());
      }
    }
  } catch (IOException ioe) {
    String payload = new String(message.message(), Charsets.UTF_8);
    log.error(String.format("Failed to parse kafka message with offset %d: %s.", message.offset(), payload), ioe);
  }
}
// Wires the spec-consumer into the job catalog, then starts the monitor service.
@Override protected void startUp() {
  // listener will add job specs to a blocking queue to send to callers of changedSpecs()
  // IMPORTANT: This addListener should be invoked after job catalog has been initialized. This is guaranteed because
  // StreamingKafkaSpecConsumer is boot after jobCatalog in GobblinClusterManager::startAppLauncherAndServices()
  _jobCatalog.addListener(new JobSpecListener());
  // Block until the monitor service is fully RUNNING so startUp() returns only when consumption is live.
  _jobMonitor.startAsync().awaitRunning();
}
/**
 * Aggregates the {@link StandardMetrics} exposed by the job scheduler, the
 * multi-manager, the job catalog, and the job configuration manager.
 *
 * @return a new mutable list containing every contributor's standard metrics
 */
@Override public Collection<StandardMetrics> getStandardMetricsCollection() {
  // Parameterized list; the original used a raw ArrayList, which compiles with an
  // unchecked-conversion warning and defeats generic type checking.
  List<StandardMetrics> list = new ArrayList<>();
  list.addAll(this.jobScheduler.getStandardMetricsCollection());
  list.addAll(this.multiManager.getStandardMetricsCollection());
  list.addAll(this.jobCatalog.getStandardMetricsCollection());
  list.addAll(this.jobConfigurationManager.getStandardMetricsCollection());
  return list;
}
/** {@inheritDoc} */
@Override
public void remove(URI uri) {
  // Removal is delegated straight to the underlying mutable catalog.
  MutableJobCatalog mutableFallback = (MutableJobCatalog) _fallback;
  mutableFallback.remove(uri);
}
/** {@inheritDoc} */
@Override
public void put(JobSpec jobSpec) {
  // Insertion is delegated straight to the underlying mutable catalog.
  MutableJobCatalog mutableFallback = (MutableJobCatalog) _fallback;
  mutableFallback.put(jobSpec);
}
/**
 * Removes this job's spec from the catalog when the job is not meant to run again:
 * either it was flagged run-once, or it carries no schedule (a one-shot job).
 * Deletion is attempted at most once, tracked by {@code jobDeleteAttempted}.
 *
 * @throws JobException if unscheduling the job fails
 */
private void deleteJobSpec() throws JobException {
  // Boolean.parseBoolean returns a primitive directly; Boolean.valueOf here created
  // a boxed Boolean only to auto-unbox it immediately.
  boolean runOnce = Boolean.parseBoolean(jobProps.getProperty(ConfigurationKeys.JOB_RUN_ONCE_KEY, "false"));
  boolean hasSchedule = jobProps.containsKey(ConfigurationKeys.JOB_SCHEDULE_KEY);
  // Flattened guard: only act for non-recurring jobs with a catalog present.
  if ((runOnce || !hasSchedule) && this.jobCatalog != null) {
    try {
      if (!this.jobDeleteAttempted) {
        log.info("Deleting job spec on {}", this.jobUri);
        this.jobScheduler.unscheduleJob(this.jobUri);
        this.jobCatalog.remove(new URI(jobUri));
        this.jobDeleteAttempted = true;
      }
    } catch (URISyntaxException e) {
      // A malformed URI is logged rather than propagated: the job itself succeeded.
      log.error("Failed to remove job with bad uri " + jobUri, e);
    }
  }
}
// Register the lifecycle listener (presumably weakly referenced, per the method name —
// verify it is kept alive by js1Listener's strong reference for the duration of the test),
// then put the spec into the mutable catalog so listener callbacks fire.
instance.registerWeakJobLifecycleListener(js1Listener); instance.getMutableJobCatalog().put(js1);
/**
 * Builds a Mockito-backed {@link MutableJobCatalog} whose put/remove calls are
 * mirrored into the supplied map, letting tests inspect the catalog's contents.
 *
 * @param jobSpecs backing map that records the specs currently held by the mock
 * @return the mocked catalog
 */
private static MutableJobCatalog createMockCatalog(final Map<URI, JobSpec> jobSpecs) {
  MutableJobCatalog mockCatalog = Mockito.mock(MutableJobCatalog.class);
  // put(spec) -> store the spec in the map, keyed by its URI.
  Answer<Void> recordPut = new Answer<Void>() {
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {
      JobSpec spec = (JobSpec) invocation.getArguments()[0];
      jobSpecs.put(spec.getUri(), spec);
      return null;
    }
  };
  // remove(uri) -> drop the corresponding entry from the map.
  Answer<Void> recordRemove = new Answer<Void>() {
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {
      jobSpecs.remove((URI) invocation.getArguments()[0]);
      return null;
    }
  };
  Mockito.doAnswer(recordPut).when(mockCatalog).put(Mockito.any(JobSpec.class));
  Mockito.doAnswer(recordRemove).when(mockCatalog).remove(Mockito.any(URI.class));
  return mockCatalog;
}
/** {@inheritDoc} */
@Override
public void remove(URI uri) {
  // Removal is delegated straight to the underlying mutable catalog.
  MutableJobCatalog mutableFallback = (MutableJobCatalog) _fallback;
  mutableFallback.remove(uri);
}
// Register the lifecycle listener (presumably weakly referenced, per the method name —
// verify it is kept alive by js1Listener's strong reference for the duration of the test),
// then put the spec into the mutable catalog so listener callbacks fire.
instance.registerWeakJobLifecycleListener(js1Listener); instance.getMutableJobCatalog().put(js1);
/**
 * Aggregates the {@link StandardMetrics} exposed by the job scheduler, the
 * multi-manager, the job catalog, and the job configuration manager.
 *
 * @return a new mutable list containing every contributor's standard metrics
 */
@Override public Collection<StandardMetrics> getStandardMetricsCollection() {
  // Parameterized list; the original used a raw ArrayList, which compiles with an
  // unchecked-conversion warning and defeats generic type checking.
  List<StandardMetrics> list = new ArrayList<>();
  list.addAll(this.jobScheduler.getStandardMetricsCollection());
  list.addAll(this.multiManager.getStandardMetricsCollection());
  list.addAll(this.jobCatalog.getStandardMetricsCollection());
  list.addAll(this.jobConfigurationManager.getStandardMetricsCollection());
  return list;
}
// Wires the spec-consumer into the job catalog, then starts the monitor service.
@Override protected void startUp() {
  // listener will add job specs to a blocking queue to send to callers of changedSpecs()
  // IMPORTANT: This addListener should be invoked after job catalog has been initialized. This is guaranteed because
  // StreamingKafkaSpecConsumer is boot after jobCatalog in GobblinClusterManager::startAppLauncherAndServices()
  _jobCatalog.addListener(new JobSpecListener());
  // Block until the monitor service is fully RUNNING so startUp() returns only when consumption is live.
  _jobMonitor.startAsync().awaitRunning();
}
/**
 * Force-removes the job spec from the catalog when "always delete" was requested
 * and the spec was not already removed by the normal post-run cleanup.
 */
private void deleteJobSpec(boolean alwaysDelete, boolean isDeleted) {
  if (!alwaysDelete || isDeleted) {
    // Nothing to do: deletion is not forced, or it already happened.
    return;
  }
  try {
    GobblinHelixJobScheduler.this.jobCatalog.remove(new URI(jobUri));
  } catch (URISyntaxException e) {
    LOGGER.error("Always delete " + jobUri + ". Failed to remove job with bad uri " + jobUri, e);
  }
}
/** {@inheritDoc} */
@Override
public void put(JobSpec jobSpec) {
  // Insertion is delegated straight to the underlying mutable catalog.
  MutableJobCatalog mutableFallback = (MutableJobCatalog) _fallback;
  mutableFallback.put(jobSpec);
}
// Runs the job, then removes its spec from the catalog so a non-scheduled job is
// not re-executed; on failure, removal is still performed when JOB_ALWAYS_DELETE is set.
@Override public void run() {
  // Whether the spec must be removed from the catalog even if the run fails.
  boolean alwaysDelete = PropertiesUtils.getPropAsBoolean(this.jobProps, GobblinClusterConfigurationKeys.JOB_ALWAYS_DELETE, "false");
  // Tracks whether the happy-path removal below succeeded, so the catch blocks
  // don't attempt a second delete.
  boolean isDeleted = false;
  try {
    GobblinHelixJobScheduler.this.jobSchedulerMetrics.updateTimeBeforeJobLaunching(this.jobProps);
    GobblinHelixJobScheduler.this.jobSchedulerMetrics.updateTimeBetweenJobSchedulingAndJobLaunching(this.creationTimeInMillis, System.currentTimeMillis());
    GobblinHelixJobScheduler.this.runJob(this.jobProps, this.jobListener);
    // remove non-scheduled job catalog once done so it won't be re-executed
    if (GobblinHelixJobScheduler.this.jobCatalog != null) {
      try {
        GobblinHelixJobScheduler.this.jobCatalog.remove(new URI(jobUri));
        isDeleted = true;
      } catch (URISyntaxException e) {
        LOGGER.error("Failed to remove job with bad uri " + jobUri, e);
      }
    }
  } catch (JobException je) {
    // Honor the always-delete flag; no-op if the removal above already happened.
    deleteJobSpec(alwaysDelete, isDeleted);
    LOGGER.error("Failed to run job " + this.jobProps.getProperty(ConfigurationKeys.JOB_NAME_KEY), je);
  } catch (Exception e) {
    deleteJobSpec(alwaysDelete, isDeleted);
    throw e;
  }
}
// Closing brace of the enclosing class (likely an inner Runnable whose header is
// outside this view — confirm against the full file).
}
/**
 * Handles one Kafka message: each parsed entry is either a {@link JobSpec} to add
 * to the catalog (left side) or a spec {@link URI} to remove (right side).
 * A message that fails to parse is logged with its offset and payload, then skipped.
 */
@Override protected void processMessage(MessageAndMetadata<byte[], byte[]> message) {
  try {
    for (Either<JobSpec, URI> entry : parseJobSpec(message.message())) {
      if (entry instanceof Either.Left) {
        // Addition/update: bump the counter and push the spec into the catalog.
        this.newSpecs.inc();
        this.jobCatalog.put(((Either.Left<JobSpec, URI>) entry).getLeft());
      } else if (entry instanceof Either.Right) {
        // Removal: bump the counter and delete the spec by URI.
        this.removedSpecs.inc();
        this.jobCatalog.remove(((Either.Right<JobSpec, URI>) entry).getRight());
      }
    }
  } catch (IOException ioe) {
    String payload = new String(message.message(), Charsets.UTF_8);
    log.error(String.format("Failed to parse kafka message with offset %d: %s.", message.offset(), payload), ioe);
  }
}