/**
 * Sends the accumulated UPDATE statements to Cassandra as a single asynchronous batch.
 *
 * <p>The batch is logged or unlogged depending on {@code config.getLoggedBatch()}. Completion is
 * observed through a {@link CallBackListener} registered on the returned future; the pending and
 * per-batch counters are bumped optimistically once the async call has been issued. The request
 * list is always cleared before returning, whether or not submission succeeded.
 *
 * @param updateRequest the update statements to submit; cleared as a side effect
 */
private void runBatchUpdate(List<Update> updateRequest) {
    try {
        Batch batch;
        if (config.getLoggedBatch()) {
            batch = QueryBuilder.batch(updateRequest
                    .toArray(new RegularStatement[updateRequest.size()]));
        } else {
            batch = QueryBuilder.unloggedBatch(updateRequest
                    .toArray(new RegularStatement[updateRequest.size()]));
        }
        totalCassandraUpdateRequest.addAndGet(updateRequest.size());
        ResultSetFuture future = cassandraSession.executeAsync(batch);
        CallBackListener listener = new CallBackListener(future, null);
        future.addListener(listener, pool);
        incrementBatchUpdateCounter();
        pendingRequestCounter.incrementAndGet();
    } catch (Throwable ex) {
        // Fixed typo ("publising") and pass the Throwable itself so the full stack trace is
        // logged instead of only ex.getMessage() (which can even be null).
        LOGGER.error("Error publishing metrics in MetricCassandraCollector", ex);
        cassandraErrorCount.increment();
        registerError(ex);
    } finally {
        updateRequest.clear();
    }
}
/**
 * Sends the accumulated INSERT statements to Cassandra as a single asynchronous batch.
 *
 * <p>The batch is logged or unlogged depending on {@code config.getLoggedBatch()}. Completion is
 * observed through a {@link CallBackListener} registered on the returned future; the pending and
 * per-batch counters are bumped optimistically once the async call has been issued. The request
 * list is always cleared before returning, whether or not submission succeeded.
 *
 * @param insertRequest the insert statements to submit; cleared as a side effect
 */
private void runBatchInsert(List<Insert> insertRequest) {
    try {
        Batch batch;
        if (config.getLoggedBatch()) {
            batch = QueryBuilder.batch(insertRequest
                    .toArray(new RegularStatement[insertRequest.size()]));
        } else {
            batch = QueryBuilder.unloggedBatch(insertRequest
                    .toArray(new RegularStatement[insertRequest.size()]));
        }
        totalCassandraInsertRequest.addAndGet(insertRequest.size());
        ResultSetFuture future = cassandraSession.executeAsync(batch);
        CallBackListener listener = new CallBackListener(future, null);
        future.addListener(listener, pool);
        incrementBatchInsertCounter();
        pendingRequestCounter.incrementAndGet();
    } catch (Throwable ex) {
        // Fixed typo ("publising") and pass the Throwable itself so the full stack trace is
        // logged instead of only ex.getMessage() (which can even be null).
        LOGGER.error("Error publishing metrics in MetricCassandraCollector", ex);
        cassandraErrorCount.increment();
        registerError(ex);
    } finally {
        insertRequest.clear();
    }
}
/** Checks that non-built ({@link SimpleStatement}) and built statements can be mixed in one batch. */
@Test(groups = "short")
public void batchNonBuiltStatementTest() throws Exception {
    SimpleStatement simple =
        new SimpleStatement(
            "INSERT INTO " + TABLE1 + " (k, t) VALUES ('batchTest1', 'val1')");
    RegularStatement built = insertInto(TABLE1).value("k", "batchTest2").value("t", "val2");
    session().execute(batch().add(simple).add(built));

    List<Row> rows =
        session()
            .execute(select().from(TABLE1).where(in("k", "batchTest1", "batchTest2")))
            .all();
    // TestNG's assertEquals signature is (actual, expected); arguments were in JUnit order,
    // which produced misleading failure messages. Swapped to match the rest of the suite.
    assertEquals(rows.size(), 2);

    // NOTE(review): assumes the server returns the two partitions in this order — TODO confirm.
    Row r1 = rows.get(0);
    assertEquals(r1.getString("k"), "batchTest1");
    assertEquals(r1.getString("t"), "val1");

    Row r2 = rows.get(1);
    assertEquals(r2.getString("k"), "batchTest2");
    assertEquals(r2.getString("t"), "val2");
}
/**
 * Constructor.
 *
 * @param session the Cassandra session the built batch will be executed against
 */
public BatchBuilderImpl(Session session) {
    this.session = session;
    // Start from an empty logged batch; statements are appended later through the builder API.
    batch = QueryBuilder.batch();
}
/**
 * Creates an executor that accumulates statements into a single batch.
 *
 * @param m the mapping session used to build and run the statements
 */
public BatchExecutor(MappingSession m) {
    this.m = m;
    // Explicit empty varargs array: begin with a batch containing no statements.
    b = QueryBuilder.batch(new RegularStatement[0]);
}
protected void write(int n, boolean batch, ConsistencyLevel cl) { // We don't use insert for our test because the resultSet don't ship the queriedHost // Also note that we don't use tracing because this would trigger requests that screw up the // test for (int i = 0; i < n; ++i) if (batch) // BUG: WriteType == SIMPLE session() .execute( batch() .add(insertInto(tableName).values(new String[] {"k", "i"}, new Object[] {0, 0})) .setConsistencyLevel(cl)); else session() .execute( new SimpleStatement(String.format("INSERT INTO %s(k, i) VALUES (0, 0)", tableName)) .setConsistencyLevel(cl)); }
private void testWideBatchRows(int key) throws Throwable { // Write data Batch q = batch(); for (int i = 0; i < 4000; ++i) { q = q.add(insertInto("wide_batch_rows").value("k", key).value("i", i)); } session().execute(q.setConsistencyLevel(ConsistencyLevel.QUORUM)); // Read data ResultSet rs = session().execute(select("i").from("wide_batch_rows").where(eq("k", key))); // Verify data int i = 0; for (Row row : rs) { assertTrue(row.getInt("i") == i++); } }
// NOTE(review): fragment of a larger QueryBuilder batch test — the enclosing method and the
// declarations of batch_query, batch, query, keyspace, table, protocolVersion, codecRegistry
// and bb are outside this view. Code left byte-identical; comments only.
batch_query += "APPLY BATCH;";
batch =
    batch()
        .add(insertInto(table).values(new String[] {"k", "a"}, new Object[] {42, 1}))
        .add(update(table).using(ttl(400)));

batch_query += String.format("SELECT * FROM %s.test_int WHERE k=42;", keyspace);
batch_query += "APPLY BATCH;";
// batch(query) wraps a pre-built statement; the routing key is expected to come from it.
batch = batch(query);
assertEquals(batch.getRoutingKey(protocolVersion, codecRegistry), bb);
assertEquals(batch.toString(), batch_query);

batch_query += "SELECT * FROM foo WHERE k=42;";
batch_query += "APPLY BATCH;";
batch = batch().add(select().from("foo").where(eq("k", 42)));
// A SELECT provides no routing key, so the batch reports none.
assertEquals(batch.getRoutingKey(protocolVersion, codecRegistry), null);
assertEquals(batch.toString(), batch_query);

batch_query += "INSERT INTO foo.bar (a) VALUES (123);";
batch_query += "APPLY BATCH;";
batch = batch().using(timestamp(42)).add(insertInto("foo", "bar").value("a", 123));
assertEquals(batch.getRoutingKey(protocolVersion, codecRegistry), null);
assertEquals(batch.toString(), batch_query);
/**
 * Verifies that mixing counter updates ({@code incr}) and regular assignments ({@code set}) in a
 * single batch is rejected with an {@link IllegalArgumentException}.
 */
@Test(
    groups = "unit",
    expectedExceptions = {IllegalArgumentException.class})
public void batchMixedCounterTest() throws Exception {
    // The middle add() is a non-counter assignment, which makes the counter batch invalid;
    // the exception is expected to be raised while the chain is being built.
    batch()
        .add(update("foo").with(incr("a", 1)))
        .add(update("foo").with(set("b", 2)))
        .add(update("foo").with(incr("c", 3)))
        .using(timestamp(42));
}
// NOTE(review): garbled fragment of a QueryBuilder batch test — the first statement is cut off
// mid-expression (".add( insertInto(\"foo\")" never closes), and the declarations of `query`
// and `batch` are outside this view. Code left byte-identical; comments only.
query += "APPLY BATCH;";
batch = batch()
        .add(
            insertInto("foo")

query += "DELETE a[3] FROM foo WHERE k=1;";
query += "APPLY BATCH;";
// batch(...) seeded directly with a built DELETE of a list element.
batch = batch(delete().listElt("a", 3).from("foo").where(eq("k", 1)));
assertEquals(batch.toString(), query);

// An empty batch still renders the BEGIN/APPLY envelope.
assertEquals(batch().toString(), "BEGIN BATCH APPLY BATCH;");
// NOTE(review): fragment of a counter-update batch test — the enclosing method and the
// declarations of `query` and `batch`, plus the assertions between sections, are outside this
// view. Code left byte-identical; comments only.
query += "APPLY BATCH;";
// Explicit positive increments.
batch = batch()
        .add(update("foo").with(incr("a", 1)))
        .add(update("foo").with(incr("b", 2)))

query += "APPLY BATCH;";
// Implicit increment-by-one form.
batch = batch()
        .add(update("foo").with(incr("a")))
        .add(update("foo").with(incr("b")))

query += "APPLY BATCH;";
// Explicit positive decrements.
batch = batch()
        .add(update("foo").with(decr("a", 1)))
        .add(update("foo").with(decr("b", 2)))

query += "APPLY BATCH;";
// Implicit decrement-by-one form.
batch = batch()
        .add(update("foo").with(decr("a")))
        .add(update("foo").with(decr("b")))

query += "APPLY BATCH;";
// Negative deltas: decr(-1) and incr(-2) should flip sign in the generated CQL.
batch = batch()
        .add(update("foo").with(decr("a", -1)))
        .add(update("foo").with(incr("b", -2)))
/**
 * Removes all index entries, attribute index entries, and metric-name records for the given
 * resource in one batch, then evicts the resource from the local cache. The whole operation is
 * timed via {@code m_deleteTimer}.
 */
@Override
public void delete(final Context context, final Resource resource) {
    final Timer.Context timerCtx = m_deleteTimer.time();
    final ConsistencyLevel writeConsistency = m_contextConfigurations.getWriteConsistency(context);

    // Collect every un-index / removal statement for this resource.
    final List<RegularStatement> statements = Lists.newArrayList();
    definitelyUnindexResource(statements, context, resource, writeConsistency);
    definitelyUnindexResourceAttributes(statements, context, resource, writeConsistency);
    definitelyRemoveMetricName(statements, context, resource, writeConsistency);

    try {
        // Skip the round-trip entirely when there is nothing to delete.
        if (!statements.isEmpty()) {
            m_session.execute(batch(statements.toArray(new RegularStatement[statements.size()])));
        }
        m_cache.delete(context, resource);
    } finally {
        timerCtx.stop();
    }
}
/**
 * Removes all index entries, attribute index entries, and metric-name records for the given
 * resource in one batch, then evicts the resource from the local cache. The whole operation is
 * timed via {@code m_deleteTimer}.
 */
@Override
public void delete(final Context context, final Resource resource) {
    final Timer.Context ctx = m_deleteTimer.time();
    final ConsistencyLevel writeConsistency = m_contextConfigurations.getWriteConsistency(context);

    // Collect every un-index / removal statement for this resource.
    final List<RegularStatement> statements = Lists.newArrayList();
    definitelyUnindexResource(statements, context, resource, writeConsistency);
    definitelyUnindexResourceAttributes(statements, context, resource, writeConsistency);
    definitelyRemoveMetricName(statements, context, resource, writeConsistency);

    try {
        // Only issue the batch when there is actually something to delete.
        if (!statements.isEmpty()) {
            m_session.execute(batch(statements.toArray(new RegularStatement[statements.size()])));
        }
        m_cache.delete(context, resource);
    } finally {
        ctx.stop();
    }
}
/**
 * Writes the given RDD to Cassandra using CQL3 batched inserts.
 *
 * <p>Elements are first mapped to {@code (keys, values)} cell pairs via {@code transformer},
 * materialized with {@code collect()}, and then written in pages of {@code getBatchSize()}
 * statements per batch.
 *
 * <p>Fix: the original executed one final EMPTY batch whenever the element count was an exact
 * multiple of the page size (or zero) — the execute is now guarded on a non-empty page.
 *
 * @param rdd the source RDD
 * @param writeConfig a write-enabled job configuration
 * @param transformer maps each RDD element to its (key cells, value cells) pair
 * @throws IllegalArgumentException if {@code writeConfig} is not a write configuration
 */
public static <W> void doCql3SaveToCassandra(RDD<W> rdd, ICassandraDeepJobConfig<W> writeConfig,
        Function1<W, Tuple2<Cells, Cells>> transformer) {
    if (!writeConfig.getIsWriteConfig()) {
        throw new IllegalArgumentException("Provided configuration object is not suitable for writing");
    }

    Tuple2<Map<String, ByteBuffer>, Map<String, ByteBuffer>> tuple = new Tuple2<>(null, null);

    RDD<Tuple2<Cells, Cells>> mappedRDD = rdd.map(transformer,
            ClassTag$.MODULE$.<Tuple2<Cells, Cells>>apply(tuple.getClass()));

    // NOTE(review): first() will fail on an empty RDD — presumably the output table is only
    // needed when there is data to write; verify against callers.
    ((CassandraDeepJobConfig) writeConfig).createOutputTableIfNeeded(mappedRDD.first());

    final int pageSize = writeConfig.getBatchSize();
    int offset = 0;

    List<Tuple2<Cells, Cells>> elements = Arrays.asList((Tuple2<Cells, Cells>[]) mappedRDD.collect());

    List<Tuple2<Cells, Cells>> split;
    do {
        // Page [pageSize*offset, pageSize*(offset+1)) clamped to the end of the list.
        split = elements.subList(pageSize * (offset++), Math.min(pageSize * offset, elements.size()));

        // Guard: when size % pageSize == 0 the last page is empty — don't send an empty batch.
        if (!split.isEmpty()) {
            Batch batch = QueryBuilder.batch();
            for (Tuple2<Cells, Cells> t : split) {
                Tuple2<String[], Object[]> bindVars = Utils.prepareTuple4CqlDriver(t);

                Insert insert = QueryBuilder
                        .insertInto(quote(writeConfig.getKeyspace()), quote(writeConfig.getTable()))
                        .values(bindVars._1(), bindVars._2());

                batch.add(insert);
            }
            writeConfig.getSession().execute(batch);
        }
    } while (!split.isEmpty() && split.size() == pageSize);
}
/** Checks that non-built ({@link SimpleStatement}) and built statements can be mixed in one batch. */
@Test(groups = "short")
public void batchNonBuiltStatementTest() throws Exception {
    SimpleStatement simple =
        new SimpleStatement(
            "INSERT INTO " + TABLE1 + " (k, t) VALUES ('batchTest1', 'val1')");
    RegularStatement built = insertInto(TABLE1).value("k", "batchTest2").value("t", "val2");
    session().execute(batch().add(simple).add(built));

    List<Row> rows =
        session()
            .execute(select().from(TABLE1).where(in("k", "batchTest1", "batchTest2")))
            .all();
    // TestNG's assertEquals signature is (actual, expected); arguments were in JUnit order,
    // which produced misleading failure messages. Swapped to match the rest of the suite.
    assertEquals(rows.size(), 2);

    // NOTE(review): assumes the server returns the two partitions in this order — TODO confirm.
    Row r1 = rows.get(0);
    assertEquals(r1.getString("k"), "batchTest1");
    assertEquals(r1.getString("t"), "val1");

    Row r2 = rows.get(1);
    assertEquals(r2.getString("k"), "batchTest2");
    assertEquals(r2.getString("t"), "val2");
}
protected void write(int n, boolean batch, ConsistencyLevel cl) { // We don't use insert for our test because the resultSet don't ship the queriedHost // Also note that we don't use tracing because this would trigger requests that screw up the // test for (int i = 0; i < n; ++i) if (batch) // BUG: WriteType == SIMPLE session() .execute( batch() .add(insertInto(tableName).values(new String[] {"k", "i"}, new Object[] {0, 0})) .setConsistencyLevel(cl)); else session() .execute( new SimpleStatement(String.format("INSERT INTO %s(k, i) VALUES (0, 0)", tableName)) .setConsistencyLevel(cl)); }
private void testWideBatchRows(int key) throws Throwable { // Write data Batch q = batch(); for (int i = 0; i < 4000; ++i) { q = q.add(insertInto("wide_batch_rows").value("k", key).value("i", i)); } session().execute(q.setConsistencyLevel(ConsistencyLevel.QUORUM)); // Read data ResultSet rs = session().execute(select("i").from("wide_batch_rows").where(eq("k", key))); // Verify data int i = 0; for (Row row : rs) { assertTrue(row.getInt("i") == i++); } }
// NOTE(review): fragment of a larger QueryBuilder batch test — the enclosing method and the
// declarations of batch_query, batch, query, keyspace, table, protocolVersion, codecRegistry
// and bb are outside this view. Code left byte-identical; comments only.
batch_query += "APPLY BATCH;";
batch =
    batch()
        .add(insertInto(table).values(new String[] {"k", "a"}, new Object[] {42, 1}))
        .add(update(table).using(ttl(400)));

batch_query += String.format("SELECT * FROM %s.test_int WHERE k=42;", keyspace);
batch_query += "APPLY BATCH;";
// batch(query) wraps a pre-built statement; the routing key is expected to come from it.
batch = batch(query);
assertEquals(batch.getRoutingKey(protocolVersion, codecRegistry), bb);
assertEquals(batch.toString(), batch_query);

batch_query += "SELECT * FROM foo WHERE k=42;";
batch_query += "APPLY BATCH;";
batch = batch().add(select().from("foo").where(eq("k", 42)));
// A SELECT provides no routing key, so the batch reports none.
assertEquals(batch.getRoutingKey(protocolVersion, codecRegistry), null);
assertEquals(batch.toString(), batch_query);

batch_query += "INSERT INTO foo.bar (a) VALUES (123);";
batch_query += "APPLY BATCH;";
batch = batch().using(timestamp(42)).add(insertInto("foo", "bar").value("a", 123));
assertEquals(batch.getRoutingKey(protocolVersion, codecRegistry), null);
assertEquals(batch.toString(), batch_query);
// NOTE(review): garbled fragment of a QueryBuilder batch test — the first statement is cut off
// mid-expression (".add( insertInto(\"baz\", \"foo\")" never closes), and the declarations of
// `query` and `batch` are outside this view. Code left byte-identical; comments only.
query += "APPLY BATCH;";
batch = batch()
        .add(
            insertInto("baz", "foo")

query += "DELETE a[3] FROM foo WHERE k=1;";
query += "APPLY BATCH;";
// batch(...) seeded directly with a built DELETE of a list element.
batch = batch(delete().listElt("a", 3).from("foo").where(eq("k", 1)));
assertEquals(batch.toString(), query);

// An empty batch still renders the BEGIN/APPLY envelope.
assertEquals(batch().toString(), "BEGIN BATCH APPLY BATCH;");
/**
 * Verifies that mixing counter updates ({@code incr}) and regular assignments ({@code set}) in a
 * single batch is rejected with an {@link IllegalArgumentException}.
 */
@Test(
    groups = "unit",
    expectedExceptions = {IllegalArgumentException.class})
public void batchMixedCounterTest() throws Exception {
    // The middle add() is a non-counter assignment, which makes the counter batch invalid;
    // the exception is expected to be raised while the chain is being built.
    batch()
        .add(update("foo").with(incr("a", 1)))
        .add(update("foo").with(set("b", 2)))
        .add(update("foo").with(incr("c", 3)))
        .using(timestamp(42));
}