/**
 * Executes the accumulated update statements as one Cassandra batch (logged or
 * unlogged per {@code config.getLoggedBatch()}), submitting it asynchronously
 * and updating the request/batch counters. The request list is always cleared
 * before returning, even on failure.
 *
 * @param updateRequest update statements to batch; cleared before returning
 */
private void runBatchUpdate(List<Update> updateRequest) {
    try {
        RegularStatement[] statements =
                updateRequest.toArray(new RegularStatement[updateRequest.size()]);
        // Logged batches go through the batch log for atomicity; unlogged ones skip it.
        Batch batch = config.getLoggedBatch()
                ? QueryBuilder.batch(statements)
                : QueryBuilder.unloggedBatch(statements);
        totalCassandraUpdateRequest.addAndGet(updateRequest.size());
        ResultSetFuture future = cassandraSession.executeAsync(batch);
        CallBackListener listener = new CallBackListener(future, null);
        future.addListener(listener, pool);
        incrementBatchUpdateCounter();
        pendingRequestCounter.incrementAndGet();
    } catch (Throwable ex) {
        // Pass the throwable itself so the full stack trace is logged,
        // instead of flattening it to getMessage(). (Also fixes "publising" typo.)
        LOGGER.error("Error publishing metrics in MetricCassandraCollector", ex);
        cassandraErrorCount.increment();
        registerError(ex);
    } finally {
        // Clear regardless of outcome so the caller's buffer can be reused.
        updateRequest.clear();
    }
}
/**
 * Executes the accumulated insert statements as one Cassandra batch (logged or
 * unlogged per {@code config.getLoggedBatch()}), submitting it asynchronously
 * and updating the request/batch counters. The request list is always cleared
 * before returning, even on failure.
 *
 * @param insertRequest insert statements to batch; cleared before returning
 */
private void runBatchInsert(List<Insert> insertRequest) {
    try {
        RegularStatement[] statements =
                insertRequest.toArray(new RegularStatement[insertRequest.size()]);
        // Logged batches go through the batch log for atomicity; unlogged ones skip it.
        Batch batch = config.getLoggedBatch()
                ? QueryBuilder.batch(statements)
                : QueryBuilder.unloggedBatch(statements);
        totalCassandraInsertRequest.addAndGet(insertRequest.size());
        ResultSetFuture future = cassandraSession.executeAsync(batch);
        CallBackListener listener = new CallBackListener(future, null);
        future.addListener(listener, pool);
        incrementBatchInsertCounter();
        pendingRequestCounter.incrementAndGet();
    } catch (Throwable ex) {
        // Pass the throwable itself so the full stack trace is logged,
        // instead of flattening it to getMessage(). (Also fixes "publising" typo.)
        LOGGER.error("Error publishing metrics in MetricCassandraCollector", ex);
        cassandraErrorCount.increment();
        registerError(ex);
    } finally {
        // Clear regardless of outcome so the caller's buffer can be reused.
        insertRequest.clear();
    }
}
/**
 * Synchronously writes the given events as a single idempotent unlogged batch.
 *
 * @param events events to persist; each is mapped to a statement via {@code addStatement}
 */
@Override
public void add(List<T> events) {
    RegularStatement[] statements =
            events.stream().map(this::addStatement).toArray(RegularStatement[]::new);
    // Idempotent so the driver may safely retry or speculatively re-execute.
    Batch batch = QueryBuilder.unloggedBatch(statements);
    batch.setIdempotent(true);
    connector.session().execute(batch);
}
/**
 * Asynchronously writes the given events as a single idempotent unlogged batch.
 * The returned future from the driver is discarded; completion is not awaited.
 *
 * @param events events to persist; each is mapped to a statement via {@code addStatement}
 */
@Override
public void addAsync(List<T> events) {
    RegularStatement[] statements =
            events.stream().map(this::addStatement).toArray(RegularStatement[]::new);
    // Idempotent so the driver may safely retry or speculatively re-execute.
    Batch batch = QueryBuilder.unloggedBatch(statements);
    batch.setIdempotent(true);
    connector.session().executeAsync(batch);
}
/**
 * Inserts one row per parameter map into {@code qualifiedTable} as a single
 * unlogged batch, skipping the {@code indexColumn} entry of each map.
 *
 * <p>NOTE(review): values are concatenated verbatim into the CQL text, so
 * callers must pre-quote/escape them — this is vulnerable to CQL injection
 * for untrusted input. Prefer bound statements if callers can be migrated.
 *
 * @param paramss one map of column name to (pre-formatted) value per row
 * @return this, for chaining
 */
@SafeVarargs
public final CassandraUtils insert(Map<String, String>... paramss) {
    Batch batch = QueryBuilder.unloggedBatch();
    for (Map<String, String> params : paramss) {
        // Build the column and value lists with StringBuilder instead of
        // String += (avoids O(n^2) concatenation and the trailing-comma
        // substring, which threw StringIndexOutOfBoundsException when a map
        // held only the index column).
        StringBuilder columns = new StringBuilder();
        StringBuilder values = new StringBuilder();
        for (Map.Entry<String, String> entry : params.entrySet()) {
            if (!entry.getKey().equals(indexColumn)) {
                if (columns.length() > 0) {
                    columns.append(',');
                    values.append(',');
                }
                columns.append(entry.getKey());
                values.append(entry.getValue());
            }
        }
        if (columns.length() == 0) {
            // Nothing to insert for this map (only the index column present).
            continue;
        }
        batch.add(new SimpleStatement(
                "INSERT INTO " + qualifiedTable
                        + " (" + columns + ") VALUES (" + values + ");"));
    }
    execute(batch);
    return this;
}
// Wrap this partition's statements into one unlogged batch (no batch-log atomicity).
statementsToExecute.add(unloggedBatch(partition.toArray(new RegularStatement[partition.size()])));
// Wrap this partition's statements into one unlogged batch (no batch-log atomicity).
statementsToExecute.add(unloggedBatch(partition.toArray(new RegularStatement[partition.size()])));
private List<Statement> toStatements(Set<StatementGenerator> generators) { List<Statement> statementsToExecute = Lists.newArrayList(); Map<String, List<Statement>> statementsByKey = Maps.newHashMap(); for (StatementGenerator generator : generators) { Statement statement = generator.toStatement() .setConsistencyLevel(m_contextConfigurations.getWriteConsistency(generator.getContext())); String key = generator.getKey(); if (key == null) { // Don't try batching these statementsToExecute.add(statement); continue; } // Group these by key List<Statement> statementsForKey = statementsByKey.get(key); if (statementsForKey == null) { statementsForKey = Lists.newArrayList(); statementsByKey.put(key, statementsForKey); } statementsForKey.add(statement); } // Consolidate the grouped statements into batches for (List<Statement> statementsForKey: statementsByKey.values()) { for (List<Statement> partition : Lists.partition(statementsForKey, m_options.getMaxBatchSize())) { statementsToExecute.add(unloggedBatch(partition.toArray(new RegularStatement[partition.size()]))); } } return statementsToExecute; }
private List<Statement> toStatements(Set<StatementGenerator> generators) { List<Statement> statementsToExecute = Lists.newArrayList(); Map<String, List<Statement>> statementsByKey = Maps.newHashMap(); for (StatementGenerator generator : generators) { Statement statement = generator.toStatement() .setConsistencyLevel(m_contextConfigurations.getWriteConsistency(generator.getContext())); String key = generator.getKey(); if (key == null) { // Don't try batching these statementsToExecute.add(statement); continue; } // Group these by key List<Statement> statementsForKey = statementsByKey.get(key); if (statementsForKey == null) { statementsForKey = Lists.newArrayList(); statementsByKey.put(key, statementsForKey); } statementsForKey.add(statement); } // Consolidate the grouped statements into batches for (List<Statement> statementsForKey: statementsByKey.values()) { for (List<Statement> partition : Lists.partition(statementsForKey, m_options.getMaxBatchSize())) { statementsToExecute.add(unloggedBatch(partition.toArray(new RegularStatement[partition.size()]))); } } return statementsToExecute; }
// Capture one timestamp up front — presumably shared by the statements added to
// the batch below; confirm against the surrounding (not visible) code.
Timestamp now = Timestamp.now(); Batch batch = unloggedBatch();