@Override
public ITupleReference add(ITupleReference tuple) throws HyracksDataException {
    // Forward the tuple to the wrapped bulk loader, then hand it back to the caller.
    bulkLoader.add(tuple);
    return tuple;
}
@Override
public Throwable getFailure() {
    // Report the first recorded failure: the primary loader takes precedence
    // over the buddy BTree loader; null means neither has failed.
    if (bulkLoader.hasFailed()) {
        return bulkLoader.getFailure();
    }
    if (buddyBTreeBulkLoader.hasFailed()) {
        return buddyBTreeBulkLoader.getFailure();
    }
    return null;
}
}
/**
 * Bulk-loads the integer tuples {@code begin..end} (both inclusive) into the index.
 *
 * @param begin first integer key to load
 * @param end   last integer key to load (inclusive)
 * @throws IllegalArgumentException if {@code end < begin}
 */
private void bulkloadIntTupleRange(int begin, int end) throws Exception {
    if (end < begin) {
        throw new IllegalArgumentException("Invalid range: [" + begin + ", " + end + "]");
    }
    // The range is inclusive, so it contains (end - begin + 1) tuples; the original
    // passed (end - begin) as the numElementsHint, under-counting by one.
    IIndexBulkLoader bulkloader = index.createBulkLoader(1.0f, false, end - begin + 1, true);
    for (int i = begin; i <= end; i++) {
        TupleUtils.createIntegerTuple(builder, tuple, i);
        bulkloader.add(tuple);
    }
    bulkloader.end();
}
@Override
public boolean hasFailed() {
    // Failure state is tracked entirely by the underlying bulk loader.
    return bulkLoader.hasFailed();
}
@Override
public Throwable getFailure() {
    // Surface whatever failure the wrapped loader recorded (null when none).
    return bulkLoader.getFailure();
}
}
public static void bulkLoadCheckTuples(IIndexTestContext ctx, Collection<CheckTuple> checkTuples, boolean filtered) throws HyracksDataException { int fieldCount = ctx.getFieldCount(); int numTuples = checkTuples.size(); ArrayTupleBuilder tupleBuilder = filtered ? new ArrayTupleBuilder(fieldCount + 1) : new ArrayTupleBuilder(fieldCount); ArrayTupleReference tuple = new ArrayTupleReference(); // Perform bulk load. IIndexBulkLoader bulkLoader = ctx.getIndex().createBulkLoader(0.7f, false, numTuples, false); int c = 1; for (CheckTuple checkTuple : checkTuples) { if (LOGGER.isInfoEnabled()) { //if (c % (numTuples / 10) == 0) { LOGGER.info("Bulk Loading Tuple " + c + "/" + numTuples); //} } createTupleFromCheckTuple(checkTuple, tupleBuilder, tuple, ctx.getFieldSerdes(), filtered); bulkLoader.add(tuple); c++; } bulkLoader.end(); }
@Override
public Throwable getFailure() {
    // Delegate failure reporting to the underlying bulk loader.
    return bulkLoader.getFailure();
}
}
/**
 * Creates the BTree and bulk-loads {@code numBatches} batches of generated tuples,
 * returning the elapsed wall-clock time in milliseconds.
 *
 * @param dataGen    source of pre-generated tuple batches (blocking queue)
 * @param numThreads unused here; part of the experiment interface
 * @return elapsed load time in milliseconds
 */
@Override
public long runExperiment(DataGenThread dataGen, int numThreads) throws Exception {
    btree.create();
    // Use the monotonic nanoTime clock for elapsed-time measurement;
    // currentTimeMillis can jump if the wall clock is adjusted mid-run.
    long start = System.nanoTime();
    IIndexBulkLoader bulkLoader = btree.createBulkLoader(1.0f, false, 0L, true);
    for (int i = 0; i < numBatches; i++) {
        TupleBatch batch = dataGen.tupleBatchQueue.take();
        for (int j = 0; j < batch.size(); j++) {
            bulkLoader.add(batch.get(j));
        }
    }
    bulkLoader.end();
    // Convert nanoseconds to milliseconds to preserve the method's original unit.
    return (System.nanoTime() - start) / 1000000L;
}
}
@Override
public ITupleReference add(ITupleReference tuple) throws HyracksDataException {
    // Pass the tuple through to the delegate loader; return it for chaining.
    bulkLoader.add(tuple);
    return tuple;
}
@Override public void end() throws HyracksDataException { if (!endedBloomFilterLoad) { bulkLoader.end(); endedBloomFilterLoad = true; } }
@Override
public void cleanupArtifacts() throws HyracksDataException {
    // Nothing to clean up if the load already completed (or was cleaned up).
    if (endedBloomFilterLoad) {
        return;
    }
    bulkLoader.abort();
    endedBloomFilterLoad = true;
}
@Override
public boolean hasFailed() {
    // Mirrors the delegate loader's failure flag.
    return bulkLoader.hasFailed();
}
@Override public void initialize() throws HyracksDataException { IIndexBuilder indexBuilder = indexBuilderFactory.create(ctx, partition); IIndexDataflowHelper indexHelper = dataflowHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition); FileIndexTupleTranslator filesTupleTranslator = new FileIndexTupleTranslator(); // Build the index indexBuilder.build(); // Open the index indexHelper.open(); try { ILSMIndex index = (ILSMIndex) indexHelper.getIndexInstance(); Map<String, Object> parameters = new HashMap<>(); parameters.put(LSMIOOperationCallback.KEY_FLUSHED_COMPONENT_ID, LSMComponentId.DEFAULT_COMPONENT_ID); // Create bulk loader IIndexBulkLoader bulkLoader = index.createBulkLoader(BTree.DEFAULT_FILL_FACTOR, false, files.size(), false, parameters); // Load files for (ExternalFile file : files) { bulkLoader.add(filesTupleTranslator.getTupleFromFile(file)); } bulkLoader.end(); } finally { indexHelper.close(); } }
@Override
public ITupleReference add(ITupleReference tuple) throws HyracksDataException {
    // Hand the tuple to the wrapped loader and echo it back.
    bulkLoader.add(tuple);
    return tuple;
}
@Override
public void end() throws HyracksDataException {
    // Finish the wrapped bulk load.
    bulkLoader.end();
}
@Override
public void abort() throws HyracksDataException {
    // The BTree loader may never have been created; abort only when it exists.
    if (btreeBulkloader == null) {
        return;
    }
    btreeBulkloader.abort();
}
}