@Override
public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
        IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions)
        throws HyracksDataException {
    // Compaction consumes no input records, so the record-descriptor provider is
    // unused here; only the index dataflow helper is needed per partition.
    final LSMIndexCompactOperatorNodePushable pushable =
            new LSMIndexCompactOperatorNodePushable(ctx, partition, indexHelperFactory);
    return pushable;
}
}
@Override
public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
        IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions)
        throws HyracksDataException {
    // The modification pushable needs the shape of its single input stream
    // (input 0 of this activity) to interpret incoming frames.
    final RecordDescriptor inputRecDesc =
            recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
    return new LSMIndexInsertUpdateDeleteOperatorNodePushable(ctx, partition, indexHelperFactory,
            fieldPermutation, inputRecDesc, op, modCallbackFactory, tupleFilterFactory);
}
}
@Override
public void open() throws HyracksDataException {
    // Let the superclass acquire the index, accessor, and modification callback first.
    super.open();
    // Narrow the generic callback once so per-tuple code can use the
    // index-modification-specific API without repeated casts.
    abstractModCallback = (AbstractIndexModificationOperationCallback) modCallback;
    frameTuple = new FrameTupleReference();
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) throws HyracksDataException {
    // Seed the node with this class's registry identifier, then let the
    // superclass append its own persisted state; this subclass adds nothing.
    final ObjectNode json = registry.getClassIdentifier(getClass(), serialVersionUID);
    super.appendToJson(json, registry);
    return json;
}
if (!tryLockSucceed) { operatorNodePushable.flushPartialFrame();
@Override
public JobSpecification buildCompactJobSpec() throws AlgebricksException {
    // Fresh job specification that compacts this secondary index across all partitions.
    final JobSpecification jobSpec =
            RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
    final Pair<IFileSplitProvider, AlgebricksPartitionConstraint> partitioning =
            metadataProvider.getSplitProviderAndConstraints(dataset, index.getIndexName());
    final IIndexDataflowHelperFactory indexHelper = new IndexDataflowHelperFactory(
            metadataProvider.getStorageComponentProvider().getStorageManager(), partitioning.first);
    final LSMTreeIndexCompactOperatorDescriptor compact =
            new LSMTreeIndexCompactOperatorDescriptor(jobSpec, indexHelper);
    compact.setSourceLocation(sourceLoc);
    // Pin the operator to the secondary index's partitions, not the raw split constraint.
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(jobSpec, compact,
            secondaryPartitionConstraint);
    jobSpec.addRoot(compact);
    jobSpec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
    return jobSpec;
}
}
/**
 * Builds a job specification that compacts the primary storage of the given dataset.
 *
 * @param dataverse        dataverse containing the dataset
 * @param datasetName      name of the dataset to compact
 * @param metadataProvider provider used to resolve the dataset and its storage splits
 * @return a job specification with a single LSM compact operator as its root
 * @throws AlgebricksException if the dataset cannot be found or the spec cannot be built
 */
public static JobSpecification compactDatasetJobSpec(Dataverse dataverse, String datasetName,
        MetadataProvider metadataProvider) throws AlgebricksException {
    String dataverseName = dataverse.getDataverseName();
    Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
    if (dataset == null) {
        throw new AsterixException(
                "Could not find dataset " + datasetName + " in dataverse " + dataverseName);
    }
    JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
    Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
            metadataProvider.getSplitProviderAndConstraints(dataset);
    IIndexDataflowHelperFactory indexHelperFactory = new IndexDataflowHelperFactory(
            metadataProvider.getStorageComponentProvider().getStorageManager(), splitsAndConstraint.first);
    LSMTreeIndexCompactOperatorDescriptor compactOp =
            new LSMTreeIndexCompactOperatorDescriptor(spec, indexHelperFactory);
    // BUGFIX: the partition constraint was previously applied twice with identical
    // arguments; register it exactly once.
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, compactOp,
            splitsAndConstraint.second);
    spec.addRoot(compactOp);
    return spec;
}
/**
 * Appends this resource's persisted state to {@code json}.
 * Primitive/array fields use put/putPOJO; nested serializable components are
 * attached as JSON subtrees.
 */
@Override
protected void appendToJson(final ObjectNode json, IPersistedResourceRegistry registry)
        throws HyracksDataException {
    super.appendToJson(json, registry);
    json.putPOJO("bloomFilterKeyFields", bloomFilterKeyFields);
    json.put("bloomFilterFalsePositiveRate", bloomFilterFalsePositiveRate);
    json.put("isPrimary", isPrimary);
    json.putPOJO("btreeFields", btreeFields);
    // CONSISTENCY FIX: toJson(registry) already yields a JsonNode, so attach it with
    // set(...) as the sibling resource serializers do, instead of wrapping the node
    // in a POJONode via putPOJO (which changes the in-memory tree node type).
    json.set("compressorDecompressorFactory", compressorDecompressorFactory.toJson(registry));
}
}
case INSERT: { if (!lsmAccessor.tryInsert(tuple)) { flushPartialFrame(nextFlushTupleIndex, i); nextFlushTupleIndex = i; lsmAccessor.insert(tuple); flushPartialFrame(nextFlushTupleIndex, i); nextFlushTupleIndex = i; lsmAccessor.delete(tuple); flushPartialFrame(nextFlushTupleIndex, i); nextFlushTupleIndex = i; lsmAccessor.upsert(tuple); flushPartialFrame(nextFlushTupleIndex, i); nextFlushTupleIndex = i; lsmAccessor.update(tuple); } else { flushPartialFrame(nextFlushTupleIndex, tupleCount);
/**
 * Appends this RTree resource's persisted state to {@code json}: comparator and
 * value-provider factories as arrays of subtrees, plus policy, field-index, and
 * bloom-filter settings. Field insertion order is preserved for stable output.
 */
@Override
protected void appendToJson(final ObjectNode json, IPersistedResourceRegistry registry)
        throws HyracksDataException {
    super.appendToJson(json, registry);
    final ArrayNode cmpFactories = OBJECT_MAPPER.createArrayNode();
    for (final IBinaryComparatorFactory cmp : btreeCmpFactories) {
        cmpFactories.add(cmp.toJson(registry));
    }
    json.set("btreeCmpFactories", cmpFactories);
    json.set("linearizeCmpFactory", linearizeCmpFactory.toJson(registry));
    final ArrayNode providerFactories = OBJECT_MAPPER.createArrayNode();
    for (final IPrimitiveValueProviderFactory provider : valueProviderFactories) {
        providerFactories.add(provider.toJson(registry));
    }
    json.set("valueProviderFactories", providerFactories);
    json.set("rtreePolicyType", rtreePolicyType.toJson(registry));
    json.putPOJO("rtreeFields", rtreeFields);
    json.putPOJO("buddyBTreeFields", buddyBTreeFields);
    json.put("isPointMBR", isPointMBR);
    json.put("bloomFilterFalsePositiveRate", bloomFilterFalsePositiveRate);
}
/**
 * Serializes this RTree index state to a JSON tree: the registry class identifier,
 * the superclass's state, comparator and value-provider factories, policy type,
 * RTree field indexes, and the point-MBR flag.
 */
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) throws HyracksDataException {
    final ObjectNode json = registry.getClassIdentifier(getClass(), serialVersionUID);
    super.appendToJson(json, registry);
    final ArrayNode cmpFactories = OBJECT_MAPPER.createArrayNode();
    for (final IBinaryComparatorFactory cmp : btreeCmpFactories) {
        cmpFactories.add(cmp.toJson(registry));
    }
    json.set("btreeCmpFactories", cmpFactories);
    json.set("linearizeCmpFactory", linearizeCmpFactory.toJson(registry));
    final ArrayNode providerFactories = OBJECT_MAPPER.createArrayNode();
    for (final IPrimitiveValueProviderFactory provider : valueProviderFactories) {
        providerFactories.add(provider.toJson(registry));
    }
    json.set("valueProviderFactories", providerFactories);
    json.set("rtreePolicyType", rtreePolicyType.toJson(registry));
    json.putPOJO("rtreeFields", rtreeFields);
    json.put("isPointMBR", isPointMBR);
    return json;
}
/**
 * Serializes this inverted-index state to a JSON tree: registry class identifier,
 * superclass state, token type traits and comparator factories, tokenizer,
 * partitioning flag, field-index arrays, and the bloom-filter false-positive rate.
 */
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) throws HyracksDataException {
    ObjectNode jsonObject = registry.getClassIdentifier(getClass(), serialVersionUID);
    super.appendToJson(jsonObject, registry);
    final ArrayNode tokenTypeTraitsArray = OBJECT_MAPPER.createArrayNode();
    for (ITypeTraits tt : tokenTypeTraits) {
        tokenTypeTraitsArray.add(tt.toJson(registry));
    }
    jsonObject.set("tokenTypeTraits", tokenTypeTraitsArray);
    final ArrayNode tokenCmpFactoriesArray = OBJECT_MAPPER.createArrayNode();
    for (IBinaryComparatorFactory factory : tokenCmpFactories) {
        tokenCmpFactoriesArray.add(factory.toJson(registry));
    }
    jsonObject.set("tokenCmpFactories", tokenCmpFactoriesArray);
    jsonObject.set("tokenizerFactory", tokenizerFactory.toJson(registry));
    jsonObject.put("isPartitioned", isPartitioned);
    jsonObject.putPOJO("invertedIndexFields", invertedIndexFields);
    jsonObject.putPOJO("filterFieldsForNonBulkLoadOps", filterFieldsForNonBulkLoadOps);
    jsonObject.putPOJO("invertedIndexFieldsForNonBulkLoadOps", invertedIndexFieldsForNonBulkLoadOps);
    // CONSISTENCY FIX: this is a primitive double; write it with put(...) as the
    // sibling resources do, rather than boxing it into a POJONode via putPOJO.
    jsonObject.put("bloomFilterFalsePositiveRate", bloomFilterFalsePositiveRate);
    return jsonObject;
}