/**
 * Read-only paged view over stored work exceptions.
 *
 * @param exceptionStorageEngine backing store the page is read from
 * @param pageSize               number of entries per page
 * @param pageNum                page index to display (presumably 1-based — the
 *                               memory engine computes offset as pageSize * (pageNum - 1))
 */
public ExceptionsView(ExceptionStorageEngine exceptionStorageEngine, int pageSize, int pageNum) {
    this.exceptionStorageEngine = exceptionStorageEngine;
    this.pageNum = pageNum;
    this.pageSize = pageSize;
    // Snapshot the total count once at construction time.
    this.exceptionCount = exceptionStorageEngine.getCount();
}
/**
 * Fetches the page of exception records this view was configured for.
 *
 * @return the records for the configured (pageNum, pageSize) window
 */
public List<ExceptionData> getExceptions() {
    final List<ExceptionData> page = exceptionStorageEngine.getExceptions(pageNum, pageSize);
    return page;
}
/**
 * Returns one page of stored exceptions from the in-memory store.
 *
 * <p>Fixes two defects in the original:
 * <ul>
 *   <li>a non-positive {@code pageNum}/{@code pageSize} produced a negative offset that
 *       slipped past the {@code offset < jobHandles.size()} guard and threw
 *       {@link IndexOutOfBoundsException} — now returns an empty list instead;</li>
 *   <li>a handle evicted between the index walk and the map lookup made
 *       {@code exceptionDataMap.get} return {@code null}, which
 *       {@code ImmutableList.copyOf} rejects with an NPE — null entries are now skipped.</li>
 * </ul>
 *
 * @param pageNum  1-based page index
 * @param pageSize maximum number of entries to return
 * @return the requested page, possibly empty; never null
 */
@Override
public ImmutableList<ExceptionData> getExceptions(int pageNum, int pageSize) {
    // Reject non-positive paging arguments up front.
    if (pageNum < 1 || pageSize < 1) {
        return ImmutableList.of();
    }
    int offset = pageSize * (pageNum - 1);
    if (offset >= jobHandles.size()) {
        return ImmutableList.of();
    }
    List<ExceptionData> page = new ArrayList<>(pageSize);
    for (int i = 0; i < pageSize; i++) {
        int idx = offset + i;
        if (idx >= getCount()) {
            break;
        }
        String jobHandle = jobHandles.get(idx);
        ExceptionData data = exceptionDataMap.get(jobHandle);
        // Skip handles whose data was evicted concurrently; copyOf() is null-hostile.
        if (data != null) {
            page.add(data);
        }
    }
    return ImmutableList.copyOf(page);
}
/**
 * Lazily resolves the exception storage engine selected by {@code getEngine()}.
 *
 * <p>Memory and Postgres engines are supported; any other value — and any failure
 * to connect to Postgres — falls back to a no-op engine so the server keeps
 * running without exception persistence.
 *
 * @return the resolved engine; never null after the first call
 */
public ExceptionStorageEngine getExceptionStorageEngine() {
    if(exceptionStorageEngine == null) {
        switch (getEngine()) {
            case ENGINE_MEMORY:
                exceptionStorageEngine = new MemoryExceptionStorageEngine(MAX_MEMORY_ENTRIES);
                break;
            case ENGINE_POSTGRES:
                try {
                    exceptionStorageEngine = new PostgresExceptionStorageEngine(
                            postgreSQL.getHost(),
                            postgreSQL.getPort(),
                            postgreSQL.getDbName(),
                            postgreSQL.getUser(),
                            postgreSQL.getPassword(),
                            postgreSQL.getTable()
                    );
                } catch (SQLException e) {
                    // NOTE(review): printStackTrace buries the connection failure on stderr —
                    // route this through the class logger (with the exception as cause) so the
                    // silent fallback to the no-op engine is visible in production logs.
                    e.printStackTrace();
                    exceptionStorageEngine = new NoopExceptionStorageEngine();
                }
                break;
            default:
                // Unknown engine name: deliberately best-effort, not a hard failure.
                exceptionStorageEngine = new NoopExceptionStorageEngine();
        }
    }
    return exceptionStorageEngine;
}
}
/**
 * Cluster-aware job manager backed by a Hazelcast topic.
 *
 * <p>Delegates core job management to the superclass (with a no-op exception
 * store — NOTE(review): cluster deployments apparently do not persist work
 * exceptions; confirm that is intentional), then subscribes itself to the
 * shared work-message topic so it receives messages published by other nodes.
 *
 * @param jobQueueFactory  factory for per-function job queues
 * @param jobHandleFactory factory for job handles
 * @param uniqueIdFactory  factory for unique job ids
 * @param hazelcast        cluster instance providing the work topic
 * @param queueMetrics     metrics sink passed to the superclass
 */
public ClusterJobManager(JobQueueFactory jobQueueFactory, JobHandleFactory jobHandleFactory, UniqueIdFactory uniqueIdFactory, HazelcastInstance hazelcast, QueueMetrics queueMetrics) {
    super(jobQueueFactory, jobHandleFactory, uniqueIdFactory, new NoopExceptionStorageEngine(), queueMetrics);
    workMessageTopic = hazelcast.getTopic(WORK_TOPIC);
    // Listen for work messages published by peer nodes (this class is the listener).
    workMessageTopic.addMessageListener(this);
}
/**
 * Stores a work exception in the bounded in-memory store, evicting the oldest
 * entry once {@code maxEntries} is reached.
 *
 * <p>Fixes a race in the original: the capacity check was synchronized but the
 * subsequent {@code offer}/{@code put} were not, so concurrent callers could push
 * the store past {@code maxEntries} (and the {@code == maxEntries} test would then
 * never evict again). The whole check-evict-insert sequence now runs under one lock,
 * and {@code >=} drains any overshoot.
 *
 * @param jobHandle     handle of the failed job (also the map key)
 * @param uniqueId      client-supplied unique id of the job
 * @param jobData       original job payload
 * @param exceptionData serialized exception payload
 * @return always {@code true} (the store accepts every entry)
 */
@Override
public boolean storeException(String jobHandle, String uniqueId, byte[] jobData, byte[] exceptionData) {
    ExceptionData data = new ExceptionData(jobHandle, uniqueId, jobData, exceptionData, new LocalDateTime());
    synchronized (jobHandles) {
        // Evict oldest entries while at (or above) capacity.
        while (jobHandles.size() >= maxEntries) {
            String toRemove = jobHandles.poll();
            if (toRemove == null) {
                break; // queue drained unexpectedly; nothing left to evict
            }
            exceptionDataMap.remove(toRemove);
        }
        jobHandles.offer(jobHandle);
        exceptionDataMap.put(jobHandle, data);
    }
    return true;
}
// NOTE(review): fragment — the enclosing constructor/method (and the closing brace)
// are outside this view. Fails fast when the exceptions table cannot be validated
// or created, presumably a schema or credentials problem — TODO confirm against
// validateOrCreateTable().
if (!validateOrCreateTable()) { throw new SQLException("Unable to validate or create exceptions table. Check credentials.");
/**
 * Handles a WORK_EXCEPTION reported by a worker for the given job.
 *
 * <p>For non-background jobs the exception is forwarded to every client attached
 * to the job's unique id; in all cases the exception is persisted, metrics are
 * updated, and the job is completed and removed. A null job is ignored.
 *
 * @param job       the job that raised the exception; may be null
 * @param exception serialized exception payload from the worker
 */
public synchronized void handleWorkException(Job job, byte[] exception) {
    if (job == null) {
        return;
    }
    // Background jobs have no client waiting on them, so skip the fan-out.
    if (!job.isBackground()) {
        for (EngineClient client : getClientsForUniqueId(job.getUniqueID())) {
            client.sendWorkException(job.getJobHandle(), exception);
        }
    }
    exceptionStorageEngine.storeException(job.getJobHandle(), job.getUniqueID(), job.getData(), exception);
    metrics.handleJobException(job);
    job.complete();
    removeJob(job);
}
public DefaultServerConfiguration() { this.registry = new MetricRegistry(); this.jobHandleFactory = new LocalJobHandleFactory(getHostName()); this.jobQueueFactory = new MemoryJobQueueFactory(registry); this.uniqueIdFactory = new LocalUniqueIdFactory(); this.jobManager = new JobManager(jobQueueFactory, jobHandleFactory, uniqueIdFactory, new NoopExceptionStorageEngine(), getQueueMetrics()); this.jobQueueMonitor = new SnapshottingJobQueueMonitor(getQueueMetrics()); }
// NOTE(review): fragment — the enclosing try/loop and method are outside this view.
// Builds an ExceptionData row from the current ResultSet position; a bad row is
// logged and skipped rather than aborting the whole load (best-effort by design).
// Review: the caught exception is dropped — pass 'e' to LOG.error so the stack
// trace survives; also rs.getString may itself throw inside the handler if the
// cursor is the problem.
final byte[] jobData = rs.getBytes("job_data");
exceptionDataList.add(new ExceptionData(jobHandle, uniqueId, jobData, exceptionData, when));
} catch (Exception e) {
    LOG.error("Unable to load job '" + rs.getString("unique_id") + "'");
/**
 * Assembles a self-contained, in-memory Gearman server: memory-backed job queues,
 * a fixed "embedded" handle prefix, local unique ids, metrics-backed queue
 * accounting, and no exception persistence.
 */
public EmbeddedGearmanServer() {
    final MetricRegistry metricRegistry = new MetricRegistry();
    jobHandleFactory = new LocalJobHandleFactory("embedded");
    uniqueIdFactory = new LocalUniqueIdFactory();
    queueMetrics = new MetricsEngine(metricRegistry);
    // Exceptions are intentionally discarded in the embedded flavor.
    ExceptionStorageEngine noopExceptions = new NoopExceptionStorageEngine();
    JobQueueFactory queueFactory = new MemoryJobQueueFactory(metricRegistry);
    jobManager = new JobManager(queueFactory, jobHandleFactory, uniqueIdFactory, noopExceptions, queueMetrics);
}