/**
 * Builds a fresh {@link ProcessingBucket} that is wired to this group and its cluster.
 *
 * @param processor    the processor that will handle items added to the new bucket
 * @param errorHandler the handler notified when asynchronous processing fails
 * @return the newly constructed bucket
 */
public ProcessingBucket<I> createNewBucket(final ItemProcessor<I> processor, final AsyncErrorHandler errorHandler) {
    return new ProcessingBucket<I>(this, cluster, processor, errorHandler);
}
/**
 * Assigns the name that this bucket's processing thread will carry once started.
 *
 * @param name the desired thread name
 * @throws ExistingRunningThreadException if a processing thread already exists for this bucket
 */
public void setThreadName(final String name) throws ExistingRunningThreadException {
    stateWriteLock.lock();
    try {
        // Refuse the rename once a thread is running; the name is only applied at start-up.
        ensureNonExistingThread();
        threadName = name;
    } finally {
        stateWriteLock.unlock();
    }
}
try { busyProcessing = true; lastProcessing = baselinedCurrentTimeMillis(); } finally { stateWriteLock.unlock(); if (LOGGER.isLoggable(Level.FINER)) LOGGER.finer(getThreadName() + " : processItems() : nothing to process"); fireNothingToProcess(); return; filterQuarantined(); LOGGER.finer(getThreadName() + " : processItems() : only " + workSize + " work items available, waiting for " + batchSize + " items to fill up a batch"); fireNothingToProcess(); reassemble(); return; stateReadLock.lock(); try { secondsSinceLastWorkDone = (baselinedCurrentTimeMillis() - lastWorkDone) / 1000; } finally { stateReadLock.unlock(); final int effectiveBatchSize = determineBatchSize(); if (effectiveBatchSize > maxBatchSizeSinceLastWorkDone) { if (LOGGER.isLoggable(Level.FINER)) LOGGER.finer(getThreadName() + " : processItems() : last work was done " + secondsSinceLastWorkDone + " seconds ago, processing " + effectiveBatchSize + " batch items would exceed the rate limit of " + rateLimit + ", waiting for a while.");
private void processBatchedItems() throws ProcessingException { final int effectiveBatchSize = determineBatchSize(); LOGGER.config(getThreadName() + " : processBatchedItems() : adding " + item + " to next batch"); batch.add(item); } else { if (LOGGER.isLoggable(Level.WARNING)) LOGGER.warning(getThreadName() + " : processBatchedItems() : exception during processing, retrying in " + group.getConfig().getRetryAttemptDelay() + " milliseconds, " + executionsLeft + " retries left : " + e.getMessage()); try { Thread.sleep(group.getConfig().getRetryAttemptDelay()); reassemble();
lootsize += collector.addAllToQuarantined(waiting); if (LOGGER.isLoggable(Level.CONFIG)) LOGGER.config(getThreadName() + " : stealAllMyItemsToQuarantined() : thief:" + collector + " stole " + waiting.size() + " waiting items from us"); waiting.clear(); waitingSize = 0; lootsize += collector.addAllToQuarantined(quarantined); if (LOGGER.isLoggable(Level.CONFIG)) LOGGER.config(getThreadName() + " : stealAllMyItemsToQuarantined() : thief:" + collector + " stole " + quarantined.size() + " quarantined items from us"); quarantined = null;
final ProcessingBucket bucket = localBuckets.get(index); bucket.add(item); } finally { coordinatorReadLock.unlock();
try { busyProcessing = true; lastProcessing = baselinedCurrentTimeMillis(); } finally { stateWriteLock.unlock(); if (LOGGER.isLoggable(Level.FINER)) LOGGER.finer(getThreadName() + " : processItems() : nothing to process"); fireNothingToProcess(); return; filterQuarantined(); LOGGER.finer(getThreadName() + " : processItems() : only " + workSize + " work items available, waiting for " + batchSize + " items to fill up a batch"); fireNothingToProcess(); reassemble(); return; stateReadLock.lock(); try { secondsSinceLastWorkDone = (baselinedCurrentTimeMillis() - lastWorkDone) / 1000; } finally { stateReadLock.unlock(); final int effectiveBatchSize = determineBatchSize(); if (effectiveBatchSize > maxBatchSizeSinceLastWorkDone) { if (LOGGER.isLoggable(Level.FINER)) LOGGER.finer(getThreadName() + " : processItems() : last work was done " + secondsSinceLastWorkDone + " seconds ago, processing " + effectiveBatchSize + " batch items would exceed the rate limit of " + rateLimit + ", waiting for a while.");
private void processBatchedItems() throws ProcessingException { final int effectiveBatchSize = determineBatchSize(); LOGGER.config(getThreadName() + " : processBatchedItems() : adding " + item + " to next batch"); batch.add(item); } else { if (LOGGER.isLoggable(Level.WARNING)) LOGGER.warning(getThreadName() + " : processBatchedItems() : exception during processing, retrying in " + group.getConfig().getRetryAttemptDelay() + " milliseconds, " + executionsLeft + " retries left : " + e.getMessage()); try { Thread.sleep(group.getConfig().getRetryAttemptDelay()); reassemble();
lootsize += collector.addAllToQuarantined(waiting); if (LOGGER.isLoggable(Level.CONFIG)) LOGGER.config(getThreadName() + " : stealAllMyItemsToQuarantined() : thief:" + collector + " stole " + waiting.size() + " waiting items from us"); waiting.clear(); waitingSize = 0; lootsize += collector.addAllToQuarantined(quarantined); if (LOGGER.isLoggable(Level.CONFIG)) LOGGER.config(getThreadName() + " : stealAllMyItemsToQuarantined() : thief:" + collector + " stole " + quarantined.size() + " quarantined items from us"); quarantined = null;
final ProcessingBucket bucket = localBuckets.get(index); bucket.add(item); } finally { coordinatorReadLock.unlock();
try { busyProcessing = true; lastProcessing = baselinedCurrentTimeMillis(); } finally { stateWriteLock.unlock(); if (LOGGER.isLoggable(Level.FINER)) LOGGER.finer(getThreadName() + " : processItems() : nothing to process"); fireNothingToProcess(); return; filterQuarantined(); LOGGER.finer(getThreadName() + " : processItems() : only " + workSize + " work items available, waiting for " + batchSize + " items to fill up a batch"); fireNothingToProcess(); reassemble(); return; stateReadLock.lock(); try { secondsSinceLastWorkDone = (baselinedCurrentTimeMillis() - lastWorkDone) / 1000; } finally { stateReadLock.unlock(); final int effectiveBatchSize = determineBatchSize(); if (effectiveBatchSize > maxBatchSizeSinceLastWorkDone) { if (LOGGER.isLoggable(Level.FINER)) LOGGER.finer(getThreadName() + " : processItems() : last work was done " + secondsSinceLastWorkDone + " seconds ago, processing " + effectiveBatchSize + " batch items would exceed the rate limit of " + rateLimit + ", waiting for a while.");
private void processBatchedItems() throws ProcessingException { final int effectiveBatchSize = determineBatchSize(); LOGGER.config(getThreadName() + " : processBatchedItems() : adding " + item + " to next batch"); batch.add(item); LOGGER.warning(getThreadName() + " : processBatchedItems() : exception during processing, retrying in " + group.getConfig().getRetryAttemptDelay() + " milliseconds, " + executionsLeft + " retries left : " + e.getMessage()); try { Thread.sleep(group.getConfig().getRetryAttemptDelay()); reassemble();
lootsize += collector.addAllToQuarantined(waiting); if (LOGGER.isLoggable(Level.CONFIG)) LOGGER.config(getThreadName() + " : stealAllMyItemsToQuarantined() : thief:" + collector + " stole " + waiting.size() + " waiting items from us"); waiting.clear(); waitingSize = 0; lootsize += collector.addAllToQuarantined(quarantined); if (LOGGER.isLoggable(Level.CONFIG)) LOGGER.config(getThreadName() + " : stealAllMyItemsToQuarantined() : thief:" + collector + " stole " + quarantined.size() + " quarantined items from us"); quarantined = null;
/**
 * Assigns the name that this bucket's processing thread will carry once started.
 *
 * @param name the desired thread name
 * @throws ExistingRunningThreadException if a processing thread already exists for this bucket
 */
public void setThreadName(final String name) throws ExistingRunningThreadException {
    stateWriteLock.lock();
    try {
        // Refuse the rename once a thread is running; the name is only applied at start-up.
        ensureNonExistingThread();
        threadName = name;
    } finally {
        stateWriteLock.unlock();
    }
}
/**
 * Builds a fresh {@link ProcessingBucket} that is wired to this group and its cluster.
 *
 * @param processor    the processor that will handle items added to the new bucket
 * @param errorHandler the handler notified when asynchronous processing fails
 * @return the newly constructed bucket
 */
public ProcessingBucket<I> createNewBucket(final ItemProcessor<I> processor, final AsyncErrorHandler errorHandler) {
    return new ProcessingBucket<I>(this, cluster, processor, errorHandler);
}
final ProcessingBucket bucket = localBuckets.get(index); bucket.add(item); } finally { coordinatorReadLock.unlock();
/**
 * Assigns the name that this bucket's processing thread will carry once started.
 *
 * @param name the desired thread name
 * @throws ExistingRunningThreadException if a processing thread already exists for this bucket
 */
public void setThreadName(final String name) throws ExistingRunningThreadException {
    stateWriteLock.lock();
    try {
        // Refuse the rename once a thread is running; the name is only applied at start-up.
        ensureNonExistingThread();
        threadName = name;
    } finally {
        stateWriteLock.unlock();
    }
}
/**
 * Builds a fresh {@link ProcessingBucket} that is wired to this group and its cluster.
 *
 * @param processor    the processor that will handle items added to the new bucket
 * @param errorHandler the handler notified when asynchronous processing fails
 * @return the newly constructed bucket
 */
public ProcessingBucket<I> createNewBucket(final ItemProcessor<I> processor, final AsyncErrorHandler errorHandler) {
    return new ProcessingBucket<I>(this, cluster, processor, errorHandler);
}
/**
 * Launches this bucket's background processing thread as a daemon.
 *
 * <p>The thread uses the explicitly configured name when one was set via
 * {@code setThreadName}, otherwise the supplied generic name; either way the
 * suffix {@code " - processing"} is appended.
 *
 * @param genericThreadName fallback thread name when none was configured
 * @throws ExistingRunningThreadException if a processing thread was already started
 */
void start(final String genericThreadName) throws ExistingRunningThreadException {
    final String chosenName = (threadName != null) ? threadName : genericThreadName;
    bucketWriteLock.lock();
    try {
        ensureNonExistingThread();
        processingThread = new Thread(group.getThreadGroup(), new ProcessingThread(), chosenName + " - processing");
        // Daemon so an idle bucket never keeps the JVM alive on shutdown.
        processingThread.setDaemon(true);
        processingThread.start();
    } finally {
        bucketWriteLock.unlock();
    }
}
/**
 * Launches this bucket's background processing thread as a daemon.
 *
 * <p>The thread uses the explicitly configured name when one was set via
 * {@code setThreadName}, otherwise the supplied generic name; either way the
 * suffix {@code " - processing"} is appended.
 *
 * @param genericThreadName fallback thread name when none was configured
 * @throws ExistingRunningThreadException if a processing thread was already started
 */
void start(final String genericThreadName) throws ExistingRunningThreadException {
    final String chosenName = (threadName != null) ? threadName : genericThreadName;
    bucketWriteLock.lock();
    try {
        ensureNonExistingThread();
        processingThread = new Thread(group.getThreadGroup(), new ProcessingThread(), chosenName + " - processing");
        // Daemon so an idle bucket never keeps the JVM alive on shutdown.
        processingThread.setDaemon(true);
        processingThread.start();
    } finally {
        bucketWriteLock.unlock();
    }
}