/**
 * Prepares the batch run and, when an output schema is configured, records
 * field-level lineage: a single {@code FieldWriteOperation} covering every
 * field of the schema, targeted at the CDAP Table named by the sink config.
 *
 * @param context batch sink context used for the namespace and lineage recording
 * @throws DatasetManagementException propagated from {@code super.prepareRun}
 * @throws IllegalStateException if the configured schema string is not valid JSON
 */
@Override
public void prepareRun(BatchSinkContext context) throws DatasetManagementException {
  super.prepareRun(context);

  // Lineage is only recorded when the user supplied an explicit schema.
  String schemaString = tableSinkConfig.getSchemaStr();
  if (schemaString == null) {
    return;
  }

  Schema schema;
  try {
    schema = Schema.parseJson(schemaString);
  } catch (IOException e) {
    // Schema was validated earlier in the pipeline lifecycle, so a parse
    // failure here indicates an inconsistent state rather than bad user input.
    throw new IllegalStateException("Failed to parse schema.", e);
  }

  // A schema without fields (e.g. non-record types) has nothing to record.
  if (schema.getFields() == null) {
    return;
  }

  FieldOperation operation = new FieldWriteOperation(
      "Write",
      "Wrote to CDAP Table",
      EndPoint.of(context.getNamespace(), tableSinkConfig.getName()),
      schema.getFields().stream().map(Schema.Field::getName).collect(Collectors.toList()));
  context.record(Collections.singletonList(operation));
}
case WRITE:
  FieldWriteOperation write = (FieldWriteOperation) pipelineOperation;
  // A write operation only consumes fields, so validate that each of its
  // input fields was produced by some earlier operation in the pipeline.
  validateInputs(pipelineOperation.getName(), write.getInputFields(), validInputsSoFar);
  // NOTE(review): presumably this marks the consumed fields as "used" so they
  // are dropped from the unused/redundant output sets — confirm against the
  // definition of updateInvalidOutputs, which is not visible here.
  updateInvalidOutputs(write.getInputFields(), unusedOutputs, redundantOutputs);
  break;
case WRITE:
  FieldWriteOperation write = (FieldWriteOperation) fieldOperation;
  // Convert the stage-local field operation into a platform-level one.
  // NOTE(review): createInputFields appears to resolve the stage-scoped input
  // field names against previously processed operations — confirm against its
  // definition, which is not visible in this chunk.
  inputFields = createInputFields(write.getInputFields(), stageName, processedOperations);
  // The resulting WriteOperation keeps the (renamed) operation name and the
  // original description and sink endpoint, with the resolved input fields.
  newOperation = new WriteOperation(newOperationName, write.getDescription(), write.getSink(), inputFields);
  break;
if (schema.getFields() != null) { FieldOperation operation = new FieldWriteOperation("Write", "Wrote to TPFS dataset", EndPoint.of(context.getNamespace(), tpfsSinkConfig.name), schema.getFields().stream().map(Schema.Field::getName)