// Fragment of a Hudi write-client test: build a client, bulk-insert two more
// record batches, then verify the commit timeline and table row counts after
// each batch.
// NOTE(review): this chunk starts mid-method and has gaps — `statuses`,
// `newRecords`, `newCommitTime`, and `timeline` are declared before/between
// the visible lines, and the lines ending in ".countInstants());" are tails
// of statements whose opening lines are not shown here.
HoodieWriteConfig cfg = getConfigBuilder().build();
HoodieWriteClient client = new HoodieWriteClient(jsc, cfg);
// NOTE(review): neither `client` nor `fs` is closed in the visible code —
// confirm the enclosing test performs cleanup (HoodieWriteClient is
// AutoCloseable in recent Hudi versions).
FileSystem fs = FSUtils.getFs(basePath, jsc.hadoopConfiguration());
// The preceding (unseen) write batch must have completed without errors.
assertNoWriteErrors(statuses);
// Next batch: bulk-insert `newRecords` as a single-partition RDD under
// commit "002" (per the assertion below) and verify every write succeeded.
writeRecords = jsc.parallelize(newRecords, 1);
statuses = client.bulkInsert(writeRecords, newCommitTime).collect();
assertNoWriteErrors(statuses);
// Tail of a timeline instant-count assertion; its opening line is outside
// this view.
.countInstants());
assertEquals("Latest commit should be 002", newCommitTime, timeline.lastInstant().get().getTimestamp());
// After commit 002 the table should hold 45 rows in total.
Dataset<Row> dataSet = getRecords();
assertEquals("Must contain 45 records", 45, dataSet.count());
// Third batch: same pattern under commit "003"; the total grows to 47 rows.
writeRecords = jsc.parallelize(newRecords, 1);
statuses = client.bulkInsert(writeRecords, newCommitTime).collect();
assertNoWriteErrors(statuses);
// Tail of another timeline instant-count assertion (opening not visible).
.countInstants());
assertEquals("Latest commit should be 003", newCommitTime, timeline.lastInstant().get().getTimestamp());
dataSet = getRecords();
assertEquals("Must contain 47 records", 47, dataSet.count());
// A further write (performed in lines not visible here) is re-checked: still
// error-free and the row count is unchanged at 47 — presumably an
// update/upsert rather than an insert; TODO confirm against the missing lines.
assertNoWriteErrors(statuses);
dataSet = getRecords();
assertEquals("Must contain 47 records", 47, dataSet.count());
// NOTE(review): the cast suggests this Spark version's Dataset#collect()
// Java signature is erased to Object — confirm; collectAsList() would avoid
// the unchecked cast if available.
Row[] rows = (Row[]) dataSet.collect();