/**
 * Logs flushing messages and performs backoff waiting if there is a wait time for retry.
 *
 * @param bulkLoggingID  identifier used to correlate log lines for this bulk operation
 * @param retryOperation true if this flush is retrying previously rejected entries
 * @param retriedDocs    number of entries being retried (only meaningful when retrying)
 * @param waitTime       backoff time in milliseconds to sleep before retrying; skipped if not positive
 * @throws EsHadoopException if the backoff sleep is interrupted
 */
private void initFlushOperation(String bulkLoggingID, boolean retryOperation, long retriedDocs, long waitTime) {
    if (retryOperation) {
        if (waitTime > 0L) {
            debugLog(bulkLoggingID, "Retrying [%d] entries after backing off for [%s] ms", retriedDocs,
                    TimeValue.timeValueMillis(waitTime));
            try {
                Thread.sleep(waitTime);
            } catch (InterruptedException e) {
                // Restore the interrupt status so callers further up the stack can
                // still observe that this thread was interrupted.
                Thread.currentThread().interrupt();
                debugLog(bulkLoggingID, "Thread interrupted - giving up on retrying...");
                throw new EsHadoopException("Thread interrupted - giving up on retrying...", e);
            }
        } else {
            debugLog(bulkLoggingID, "Retrying [%d] entries immediately (without backoff)", retriedDocs);
        }
    } else {
        // First attempt: report the size of the batch about to be sent.
        debugLog(bulkLoggingID, "Sending batch of [%d] bytes/[%s] entries", data.length(), dataEntries);
    }
}
// NOTE(review): this span appears garbled by extraction — the "Completed." debugLog call's
// argument list is cut off after [totalDocs, totalAttempts,] and fused with what looks like a
// catch block ("Failed." log, hadWriteErrors = true, throw ex). It cannot compile as written;
// recover the original try/catch structure from version control before editing this code.
debugLog(bulkLoggingID, "Submitting request"); RestClient.BulkActionResponse bar = restClient.bulk(resource, data); debugLog(bulkLoggingID, "Response received"); totalAttempts++; totalTime += bar.getTimeSpent(); debugLog(bulkLoggingID, "Completed. [%d] Original Entries. [%d] Attempts. [%d/%d] Docs Sent. [%d/%d] Docs Skipped. [%d/%d] Docs Aborted.", totalDocs, totalAttempts, debugLog(bulkLoggingID, "Failed. %s", ex.getMessage()); hadWriteErrors = true; throw ex;
/**
 * Logs flushing messages and performs backoff waiting if there is a wait time for retry.
 *
 * @param bulkLoggingID  identifier used to correlate log lines for this bulk operation
 * @param retryOperation true if this flush is retrying previously rejected entries
 * @param retriedDocs    number of entries being retried (only meaningful when retrying)
 * @param waitTime       backoff time in milliseconds to sleep before retrying; skipped if not positive
 * @throws EsHadoopException if the backoff sleep is interrupted
 */
private void initFlushOperation(String bulkLoggingID, boolean retryOperation, long retriedDocs, long waitTime) {
    if (retryOperation) {
        if (waitTime > 0L) {
            debugLog(bulkLoggingID, "Retrying [%d] entries after backing off for [%s] ms", retriedDocs,
                    TimeValue.timeValueMillis(waitTime));
            try {
                Thread.sleep(waitTime);
            } catch (InterruptedException e) {
                // Restore the interrupt status so callers further up the stack can
                // still observe that this thread was interrupted.
                Thread.currentThread().interrupt();
                debugLog(bulkLoggingID, "Thread interrupted - giving up on retrying...");
                throw new EsHadoopException("Thread interrupted - giving up on retrying...", e);
            }
        } else {
            debugLog(bulkLoggingID, "Retrying [%d] entries immediately (without backoff)", retriedDocs);
        }
    } else {
        // First attempt: report the size of the batch about to be sent.
        debugLog(bulkLoggingID, "Sending batch of [%d] bytes/[%s] entries", data.length(), dataEntries);
    }
}
/**
 * Logs flushing messages and performs backoff waiting if there is a wait time for retry.
 *
 * @param bulkLoggingID  identifier used to correlate log lines for this bulk operation
 * @param retryOperation true if this flush is retrying previously rejected entries
 * @param retriedDocs    number of entries being retried (only meaningful when retrying)
 * @param waitTime       backoff time in milliseconds to sleep before retrying; skipped if not positive
 * @throws EsHadoopException if the backoff sleep is interrupted
 */
private void initFlushOperation(String bulkLoggingID, boolean retryOperation, long retriedDocs, long waitTime) {
    if (retryOperation) {
        if (waitTime > 0L) {
            debugLog(bulkLoggingID, "Retrying [%d] entries after backing off for [%s] ms", retriedDocs,
                    TimeValue.timeValueMillis(waitTime));
            try {
                Thread.sleep(waitTime);
            } catch (InterruptedException e) {
                // Restore the interrupt status so callers further up the stack can
                // still observe that this thread was interrupted.
                Thread.currentThread().interrupt();
                debugLog(bulkLoggingID, "Thread interrupted - giving up on retrying...");
                throw new EsHadoopException("Thread interrupted - giving up on retrying...", e);
            }
        } else {
            debugLog(bulkLoggingID, "Retrying [%d] entries immediately (without backoff)", retriedDocs);
        }
    } else {
        // First attempt: report the size of the batch about to be sent.
        debugLog(bulkLoggingID, "Sending batch of [%d] bytes/[%s] entries", data.length(), dataEntries);
    }
}
/**
 * Logs flushing messages and performs backoff waiting if there is a wait time for retry.
 *
 * @param bulkLoggingID  identifier used to correlate log lines for this bulk operation
 * @param retryOperation true if this flush is retrying previously rejected entries
 * @param retriedDocs    number of entries being retried (only meaningful when retrying)
 * @param waitTime       backoff time in milliseconds to sleep before retrying; skipped if not positive
 * @throws EsHadoopException if the backoff sleep is interrupted
 */
private void initFlushOperation(String bulkLoggingID, boolean retryOperation, long retriedDocs, long waitTime) {
    if (retryOperation) {
        if (waitTime > 0L) {
            debugLog(bulkLoggingID, "Retrying [%d] entries after backing off for [%s] ms", retriedDocs,
                    TimeValue.timeValueMillis(waitTime));
            try {
                Thread.sleep(waitTime);
            } catch (InterruptedException e) {
                // Restore the interrupt status so callers further up the stack can
                // still observe that this thread was interrupted.
                Thread.currentThread().interrupt();
                debugLog(bulkLoggingID, "Thread interrupted - giving up on retrying...");
                throw new EsHadoopException("Thread interrupted - giving up on retrying...", e);
            }
        } else {
            debugLog(bulkLoggingID, "Retrying [%d] entries immediately (without backoff)", retriedDocs);
        }
    } else {
        // First attempt: report the size of the batch about to be sent.
        debugLog(bulkLoggingID, "Sending batch of [%d] bytes/[%s] entries", data.length(), dataEntries);
    }
}
// NOTE(review): this span appears garbled by extraction — the "Completed." debugLog call's
// argument list is cut off after [totalDocs, totalAttempts,] and fused with what looks like a
// catch block ("Failed." log, hadWriteErrors = true, throw ex). It cannot compile as written;
// recover the original try/catch structure from version control before editing this code.
debugLog(bulkLoggingID, "Submitting request"); RestClient.BulkActionResponse bar = restClient.bulk(resource, data); debugLog(bulkLoggingID, "Response received"); totalAttempts++; totalTime += bar.getTimeSpent(); debugLog(bulkLoggingID, "Completed. [%d] Original Entries. [%d] Attempts. [%d/%d] Docs Sent. [%d/%d] Docs Skipped. [%d/%d] Docs Aborted.", totalDocs, totalAttempts, debugLog(bulkLoggingID, "Failed. %s", ex.getMessage()); hadWriteErrors = true; throw ex;
// NOTE(review): this span appears garbled by extraction — the "Completed." debugLog call's
// argument list is cut off after [totalDocs, totalAttempts,] and fused with what looks like a
// catch block ("Failed." log, hadWriteErrors = true, throw ex). It cannot compile as written;
// recover the original try/catch structure from version control before editing this code.
debugLog(bulkLoggingID, "Submitting request"); RestClient.BulkActionResponse bar = restClient.bulk(resource, data); debugLog(bulkLoggingID, "Response received"); totalAttempts++; totalTime += bar.getTimeSpent(); debugLog(bulkLoggingID, "Completed. [%d] Original Entries. [%d] Attempts. [%d/%d] Docs Sent. [%d/%d] Docs Skipped. [%d/%d] Docs Aborted.", totalDocs, totalAttempts, debugLog(bulkLoggingID, "Failed. %s", ex.getMessage()); hadWriteErrors = true; throw ex;
// NOTE(review): this span appears garbled by extraction — the "Completed." debugLog call's
// argument list is cut off after [totalDocs, totalAttempts,] and fused with what looks like a
// catch block ("Failed." log, hadWriteErrors = true, throw ex). It cannot compile as written;
// recover the original try/catch structure from version control before editing this code.
debugLog(bulkLoggingID, "Submitting request"); RestClient.BulkActionResponse bar = restClient.bulk(resource, data); debugLog(bulkLoggingID, "Response received"); totalAttempts++; totalTime += bar.getTimeSpent(); debugLog(bulkLoggingID, "Completed. [%d] Original Entries. [%d] Attempts. [%d/%d] Docs Sent. [%d/%d] Docs Skipped. [%d/%d] Docs Aborted.", totalDocs, totalAttempts, debugLog(bulkLoggingID, "Failed. %s", ex.getMessage()); hadWriteErrors = true; throw ex;