@SuppressWarnings("unchecked") @Override public <T> Codec<T> newCodec(Class<T> objectClass) { return new SerializableCodec(); } }
@SuppressWarnings("unchecked") protected Codec<Record> getRecordCodec(String codec) { if (codec == null) { return NO_CODEC; } switch (codec) { case "java": return new SerializableCodec<>(); case "avro": return new AvroMessageCodec<>(Record.class); case "avroJson": return new AvroJsonCodec<>(Record.class); case "avroBinary": return new AvroBinaryCodec<>(Record.class); default: throw new IllegalArgumentException("Unknown codec: " + codec); } }
@Override public <T> Codec<T> newCodec(Class<T> objectClass) { switch (encoding) { case "json": return new AvroJsonCodec<>(objectClass); case "binary": return new AvroBinaryCodec<>(objectClass); case "confluent": return new AvroConfluentCodec<>(objectClass, schemaRegistryUrls); case "message": default: return new AvroMessageCodec<>(objectClass, Framework.getService(AvroService.class).getSchemaStore()); } } }
/**
 * Fetches a bulk command from the key/value store.
 *
 * @param commandId the command identifier
 * @return the decoded command, or {@code null} if no command is stored under that id
 */
@Override
public BulkCommand getCommand(String commandId) {
    byte[] encoded = getKvStore().get(COMMAND_PREFIX + commandId);
    return encoded == null ? null : BulkCodecs.getCommandCodec().decode(encoded);
}
/** Publishes a status delta record, keyed by the command id, on the first output stream. */
public static void updateStatus(ComputationContext context, BulkStatus delta) {
    byte[] encoded = BulkCodecs.getStatusCodec().encode(delta);
    context.produceRecord(OUTPUT_1, delta.getId(), encoded);
}
/**
 * @param avroSchemaStorePath directory of a file-based Avro schema store, or {@code null}
 *            for no schema store
 * @param dataSize size limit used when rendering data
 */
public TextRenderer(String avroSchemaStorePath, int dataSize) {
    schemaStore = avroSchemaStorePath == null ? null
            : new FileAvroSchemaStore(Paths.get(avroSchemaStorePath));
    this.dataSize = dataSize;
}
public AvroMessageCodec(Class<T> messageClass, AvroSchemaStore store) { this.messageClass = messageClass; schema = ReflectData.get().getSchema(messageClass); if (store != null) { store.addSchema(schema); } encoder = new BinaryMessageEncoder<>(ReflectData.get(), schema); decoder = new BinaryMessageDecoder<>(ReflectData.get(), schema, store); }
/**
 * Creates a file-based Avro schema store rooted at the given directory, loading any
 * schema files already present in it.
 *
 * @param schemaDirectoryPath root directory of the store; created if it does not exist
 * @throws IllegalArgumentException if the path exists but is not a directory, or if the
 *             directory cannot be created
 */
public FileAvroSchemaStore(Path schemaDirectoryPath) {
    this.schemaDirectoryPath = schemaDirectoryPath;
    File directory = schemaDirectoryPath.toFile();
    if (directory.exists()) {
        if (!directory.isDirectory()) {
            throw new IllegalArgumentException("Invalid SchemaStore root path: " + schemaDirectoryPath);
        }
        loadSchemas(schemaDirectoryPath);
    } else if (!directory.mkdirs() && !directory.isDirectory()) {
        // the original code silently ignored the mkdirs() result, leaving the store
        // unusable when the directory could not be created; the extra isDirectory()
        // check tolerates a concurrent creation racing with us
        throw new IllegalArgumentException("Cannot create SchemaStore root path: " + schemaDirectoryPath);
    }
}
/**
 * Loads and registers the Avro schema contained in the given file.
 *
 * @param schemaPath path to a file containing an Avro schema definition
 * @throws IllegalArgumentException if the file cannot be read or parsed as a schema
 */
public void loadSchema(Path schemaPath) {
    Schema parsed;
    try {
        parsed = new Schema.Parser().parse(schemaPath.toFile());
    } catch (IOException e) {
        throw new IllegalArgumentException("Invalid schema file: " + schemaPath, e);
    }
    addSchema(parsed);
}
@SuppressWarnings("unchecked") protected Codec<Record> getCodecForStreams(String name, Set<String> streams) { Codec<Record> codec = null; Set<String> codecNames = new HashSet<>(); for (String stream : streams) { codec = settings.getCodec(stream); codecNames.add(codec == null ? "none" : codec.getName()); } if (codecNames.size() > 1) { throw new IllegalArgumentException(String.format("Different codecs for computation %s: %s", name, Arrays.toString(codecNames.toArray()))); } if (codec == null) { codec = NO_CODEC; } return codec; }
/**
 * Fetches the status of a bulk command from the key/value store.
 *
 * @param commandId the command identifier
 * @return the decoded status, or an "unknown" status when nothing is stored for that id
 */
@Override
public BulkStatus getStatus(String commandId) {
    byte[] encoded = getKvStore().get(STATUS_PREFIX + commandId);
    if (encoded != null) {
        return BulkCodecs.getStatusCodec().decode(encoded);
    }
    log.debug("Request status of unknown command: {}", commandId);
    return BulkStatus.unknownOf(commandId);
}
/**
 * Immediately publishes a status delta marking the command as scrolling, stamped with
 * the scroll start time.
 */
protected void updateStatusAsScrolling(ComputationContext context, String commandId) {
    BulkStatus delta = BulkStatus.deltaOf(commandId);
    delta.setState(SCROLLING_RUNNING);
    delta.setScrollStartTime(Instant.now());
    byte[] encoded = BulkCodecs.getStatusCodec().encode(delta);
    // produceRecordImmediate bypasses the normal checkpoint batching, only available
    // on the implementation class
    ((ComputationContextImpl) context).produceRecordImmediate(STATUS_STREAM, commandId, encoded);
}
/**
 * Creates a tailer consuming from Kafka with the given codec and consumer group.
 * When no codec is configured, records are decoded with Java serialization.
 */
protected KafkaLogTailer(Codec<M> codec, KafkaNamespace ns, String group, Properties consumerProps) {
    this.codec = codec;
    this.decodeCodec = NO_CODEC.equals(codec) ? new SerializableCodec<>() : codec;
    Objects.requireNonNull(group);
    this.ns = ns;
    this.group = group;
    consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, ns.getKafkaGroup(group));
    // unique client id per tailer instance within the group
    consumerProps.put(ConsumerConfig.CLIENT_ID_CONFIG,
            group + "-" + CONSUMER_CLIENT_ID_SEQUENCE.getAndIncrement());
    this.consumer = new KafkaConsumer<>(consumerProps);
}
/**
 * @param avroSchemaStorePath directory of a file-based Avro schema store, or {@code null}
 *            for no schema store
 * @param dataSize size limit used when rendering data
 */
public MarkdownRenderer(String avroSchemaStorePath, int dataSize) {
    if (avroSchemaStorePath == null) {
        schemaStore = null;
    } else {
        schemaStore = new FileAvroSchemaStore(Paths.get(avroSchemaStorePath));
    }
    this.dataSize = dataSize;
}
/**
 * Appends the bucket data to the command's file and accumulates the processed document
 * count; once every expected document has been seen, finalizes the blob.
 */
@Override
public void processRecord(ComputationContext context, String documentIdsStreamName, Record record) {
    Codec<DataBucket> codec = BulkCodecs.getDataBucketCodec();
    DataBucket in = codec.decode(record.getData());
    String commandId = in.getCommandId();
    long nbDocuments = in.getCount();
    appendToFile(commandId, in.getData());
    // accumulate per-command document count; merge replaces the manual
    // containsKey/get/put sequence
    counters.merge(commandId, nbDocuments, Long::sum);
    lastBuckets.put(commandId, in);
    if (counters.get(commandId) < getTotal(commandId)) {
        return;
    }
    finishBlob(context, commandId);
}
/**
 * Stores the command in the KV store and returns its encoded form.
 *
 * @param command the bulk command to persist
 * @return the command encoded as bytes, exactly as stored
 */
public byte[] setCommand(BulkCommand command) {
    byte[] encoded = BulkCodecs.getCommandCodec().encode(command);
    getKvStore().put(COMMAND_PREFIX + command.getId(), encoded);
    return encoded;
}
/**
 * Watches bulk status records; when an indexing action completes, refreshes the index
 * and updates its alias as needed. Always checkpoints afterwards.
 */
@Override
public void processRecord(ComputationContext context, String inputStream, Record record) {
    BulkStatus status = codec.decode(record.getData());
    boolean indexingCompleted = IndexAction.ACTION_NAME.equals(status.getAction())
            && BulkStatus.State.COMPLETED.equals(status.getState());
    if (indexingCompleted) {
        logIndexing(status);
        BulkCommand command = Framework.getService(BulkService.class).getCommand(status.getId());
        refreshIndexIfNeeded(command);
        updateAliasIfNeeded(command);
    }
    context.askForCheckpoint();
}
/**
 * Immediately publishes a status delta once scrolling has ended: COMPLETED when no
 * document was found, RUNNING otherwise, with the scroll end time and total count.
 *
 * @param errorMessage optional error to record on the delta; may be {@code null}
 */
protected void updateStatusAfterScroll(ComputationContext context, String commandId, long documentCount,
        String errorMessage) {
    BulkStatus delta = BulkStatus.deltaOf(commandId);
    if (errorMessage != null) {
        delta.inError(errorMessage);
    }
    if (documentCount == 0) {
        // nothing to process: the command is already done
        delta.setState(COMPLETED);
        delta.setCompletedTime(Instant.now());
    } else {
        delta.setState(RUNNING);
    }
    delta.setScrollEndTime(Instant.now());
    delta.setTotal(documentCount);
    byte[] encoded = BulkCodecs.getStatusCodec().encode(delta);
    ((ComputationContextImpl) context).produceRecordImmediate(STATUS_STREAM, commandId, encoded);
}
/**
 * Feeds the requests of a non-empty data bucket into the bulk processor and reports
 * the processed count as a status delta. Marks that updates are pending either way.
 */
@Override
public void processRecord(ComputationContext context, String inputStream, Record record) {
    DataBucket bucket = codec.decode(record.getData());
    if (bucket.getCount() > 0) {
        BulkRequest bulkRequest = decodeRequest(bucket);
        for (DocWriteRequest request : bulkRequest.requests()) {
            bulkProcessor.add(request);
        }
        BulkStatus delta = BulkStatus.deltaOf(bucket.getCommandId());
        delta.setProcessed(bucket.getCount());
        AbstractBulkComputation.updateStatus(context, delta);
    }
    updates = true;
}
/**
 * Stores the status in the KV store and returns the encoded status.
 * Terminal states get a TTL so finished entries eventually expire; an ABORTED status
 * additionally clears the stored command so computations can detect the abort.
 */
public byte[] setStatus(BulkStatus status) {
    KeyValueStore kvStore = getKvStore();
    byte[] statusAsBytes = BulkCodecs.getStatusCodec().encode(status);
    switch (status.getState()) {
    case ABORTED:
        kvStore.put(STATUS_PREFIX + status.getId(), statusAsBytes, ABORTED_TTL_SECONDS);
        // we remove the command from the kv store, so computation have to handle abort
        kvStore.put(COMMAND_PREFIX + status.getId(), (String) null);
        break;
    case COMPLETED:
        kvStore.put(STATUS_PREFIX + status.getId(), statusAsBytes, COMPLETED_TTL_SECONDS);
        // keep the command around for the same window as the completed status
        kvStore.setTTL(COMMAND_PREFIX + status.getId(), COMPLETED_TTL_SECONDS);
        break;
    default:
        // in-progress states are stored without expiration
        kvStore.put(STATUS_PREFIX + status.getId(), statusAsBytes);
    }
    return statusAsBytes;
}