/**
 * Closes the underlying single-message writer, releasing any resources it
 * holds.  Exceptions from the delegate propagate to the caller.
 */
@Override
public void close() throws Exception {
  messageWriter.close();
}
}
/**
 * Initializes the wrapped single-message writer.  The Storm configuration,
 * topology context, and writer configuration parameters are intentionally
 * unused: the delegate's {@code init()} takes no arguments.
 */
@Override
public void init(Map stormConf, TopologyContext topologyContext, WriterConfiguration config) throws Exception {
  messageWriter.init();
}
/**
 * @return the name of the wrapped single-message writer; used to look up
 *         writer-specific configuration elsewhere.
 */
@Override
public String getName() {
  return messageWriter.getName();
}
/**
 * Adapts a single-message writer to the bulk-writer contract.  The incoming
 * batch is expected to contain exactly one message; batches with more than
 * one message are rejected as a whole.  Any failure from the delegate marks
 * every tuple in the batch as errored.
 *
 * @param sensorType     sensor the messages belong to
 * @param configurations writer configuration
 * @param tuples         tuples backing the messages
 * @param messages       the batch, expected to hold at most one message
 * @return a response marking all tuples as either successes or errors
 */
@Override
public BulkWriterResponse write(String sensorType, WriterConfiguration configurations, Iterable<Tuple> tuples, List<MESSAGE_T> messages) throws Exception {
  BulkWriterResponse response = new BulkWriterResponse();
  // Reject oversized batches without touching the delegate.
  if(messages.size() > 1) {
    response.addAllErrors(new IllegalStateException("WriterToBulkWriter expects a batch of exactly 1"), tuples);
    return response;
  }
  try {
    messageWriter.write(sensorType, configurations, Iterables.getFirst(tuples, null), Iterables.getFirst(messages, null));
  }
  catch(Exception e) {
    response.addAllErrors(e, tuples);
    return response;
  }
  response.addAllSuccesses(tuples);
  return response;
}
/**
 * Writes a batch of messages to Solr with a single {@code add} call and, if
 * configured, an immediate commit.  Solr applies the batch atomically, so
 * the response marks all tuples as successes or all as failures; partial
 * failure is not observable here.
 *
 * @param sourceType     source/sensor name, used to resolve the target collection
 * @param configurations writer configuration for this source
 * @param tuples         Storm tuples backing the messages, passed or failed as a group
 * @param messages       the JSON messages to index
 * @return a response marking every tuple as success or error
 */
@Override
public BulkWriterResponse write(String sourceType, WriterConfiguration configurations, Iterable<Tuple> tuples, List<JSONObject> messages) throws Exception {
  String collection = getCollection(sourceType, configurations);
  BulkWriterResponse bulkResponse = new BulkWriterResponse();
  Collection<SolrInputDocument> docs = toDocs(messages);
  try {
    // Solr commits the entire batch or throws an exception for it.  There's no way to get partial failures.
    Optional<SolrException> exceptionOptional = fromUpdateResponse(solr.add(collection, docs));
    if(exceptionOptional.isPresent()) {
      bulkResponse.addAllErrors(exceptionOptional.get(), tuples);
    }
    else {
      if (shouldCommit) {
        // An explicit commit can also fail; its failure fails the whole batch.
        exceptionOptional = fromUpdateResponse(solr.commit(collection, waitFlush, waitSearcher, softCommit));
        if(exceptionOptional.isPresent()) {
          bulkResponse.addAllErrors(exceptionOptional.get(), tuples);
        }
      }
      // Only report success if neither the add nor the commit surfaced an error.
      if(!exceptionOptional.isPresent()) {
        bulkResponse.addAllSuccesses(tuples);
      }
    }
  }
  catch(HttpSolrClient.RemoteSolrException sse) {
    // Transport-level Solr failures also fail the batch as a unit.
    bulkResponse.addAllErrors(sse, tuples);
  }
  return bulkResponse;
}
// NOTE(review): this is a fragment of a larger test method -- the enclosing
// signature and closing braces are not visible here, so the intent below is
// inferred and should be confirmed against the full file.
verify(batchWriter, times(1)).init(any(), any(), any());
for(int i = 0;i < 4;++i) {
  Tuple t = tuples.get(i);
  bolt.execute(t);
  // While the batch is still filling, nothing should be acked or written.
  verify(outputCollector, times(0)).ack(t);
  verify(batchWriter, times(0)).write(eq(sensorType), any(), any(), any());
  // NOTE(review): stubbing the writer *after* execute() and then immediately
  // verifying a successful ack inside the same loop iteration looks out of
  // order for Mockito -- this may be a mis-extracted chunk; verify against
  // the surrounding (unseen) code.
  BulkWriterResponse writerResponse = new BulkWriterResponse();
  writerResponse.addAllSuccesses(tuples);
  when(batchWriter.write(any(), any(), any(), any())).thenReturn(writerResponse);
  verify(outputCollector, times(1)).ack(t);
  verify(batchWriter, times(1)).write(eq(sensorType), any(), any(), any());
  verify(outputCollector, times(0)).reportError(any());
  verify(outputCollector, times(0)).fail(any());
/**
 * Test stub: optionally delays via the configured sleep function, then
 * reports every tuple as successfully written.
 */
@Override
public BulkWriterResponse write(String sensorType, WriterConfiguration configurations, Iterable<Tuple> tuples, List<JSONObject> messages) throws Exception {
  if(sleepFunction != null) {
    // Simulate a slow writer when a sleep function has been installed.
    sleepFunction.apply(null);
  }
  BulkWriterResponse allSuccessful = new BulkWriterResponse();
  allSuccessful.addAllSuccesses(tuples);
  return allSuccessful;
}
// NOTE(review): fragment -- the opening of the if/else and the closing braces
// are outside this view.  As written, the statements after the throw are
// unreachable within the same block (a Java compile error), which suggests a
// '}' was lost in extraction; confirm against the full file.
BulkWriterResponse response = bulkMessageWriter.write(sensorType, configurations, tupleList, messageList);
} else if (response.hasErrors()) {
  throw new IllegalStateException("Unhandled bulk errors in response: " + response.getErrors());
  // Presumably intended to run after the if/else: drop both the successes and
  // the errored tuples from the pending set, then commit what remains.
  tuplesToAck.removeAll(response.getSuccesses());
  response.getErrors().values().forEach(tuplesToAck::removeAll);
  commit(tuplesToAck);
/**
 * With a batch size of one, a single tuple should be written and acked
 * immediately, with no errors reported.
 */
@Test
public void testNonBatchHappyPath() throws Exception {
  ParserConfigurations parserConfig = getConfigurations(1);
  String sensorType = "test";
  Tuple tuple = mock(Tuple.class);
  when(tuple.getValueByField(eq("message"))).thenReturn(new JSONObject());
  WriterBolt bolt = new WriterBolt(new WriterHandler(writer), parserConfig, sensorType);
  bolt.prepare(new HashMap(), topologyContext, outputCollector);
  // prepare() must initialize the underlying writer exactly once.
  verify(writer, times(1)).init();
  bolt.execute(tuple);
  // The tuple is acked and written once; no error paths are exercised.
  verify(outputCollector, times(1)).ack(tuple);
  verify(writer, times(1)).write(eq(sensorType), any(), any(), any());
  verify(outputCollector, times(0)).reportError(any());
  verify(outputCollector, times(0)).fail(any());
}

@Test
@Test public void shouldWriteManySuccessfully() { // create a few tuples and the messages associated with the tuples List<Tuple> tuples = createTuples(3); List<JSONObject> messages = createMessages(3); // create a document writer which will successfully write all BulkDocumentWriterResults<TupleBasedDocument> results = new BulkDocumentWriterResults<>(); results.addSuccess(createDocument(messages.get(0), tuples.get(0))); results.addSuccess(createDocument(messages.get(1), tuples.get(1))); results.addSuccess(createDocument(messages.get(2), tuples.get(2))); BulkDocumentWriter<TupleBasedDocument> docWriter = mock(BulkDocumentWriter.class); when(docWriter.write()).thenReturn(results); // attempt to write ElasticsearchWriter esWriter = new ElasticsearchWriter(); esWriter.setDocumentWriter(docWriter); esWriter.init(stormConf, topologyContext, writerConfiguration); BulkWriterResponse response = esWriter.write("bro", writerConfiguration, tuples, messages); // response should only contain successes assertFalse(response.hasErrors()); assertTrue(response.getSuccesses().contains(tuples.get(0))); assertTrue(response.getSuccesses().contains(tuples.get(1))); assertTrue(response.getSuccesses().contains(tuples.get(2))); }
/**
 * Wires this handler's writer into the topology.  Bulk writers use the
 * batching configuration as-is; single-message writers are wrapped in a
 * facade that forces every "batch" to contain exactly one message.
 *
 * @param stormConf       topology-level Storm configuration
 * @param topologyContext context of the running topology
 * @param collector       collector used to ack/fail tuples
 * @param configurations  parser configurations to derive the writer config from
 * @throws IllegalStateException if the underlying writer fails to initialize
 */
public void init(Map stormConf, TopologyContext topologyContext, OutputCollector collector, ParserConfigurations configurations) {
  if(isBulk) {
    writerTransformer = config -> configStrategy.createWriterConfig(messageWriter, config);
  }
  else {
    // Non-bulk writers still flow through the bulk machinery, but with a
    // facade that fixes the batch size at one.
    writerTransformer = config -> new SingleBatchConfigurationFacade(configStrategy.createWriterConfig(messageWriter, config));
  }
  try {
    messageWriter.init(stormConf, topologyContext, writerTransformer.apply(configurations));
  }
  catch (Exception e) {
    throw new IllegalStateException("Unable to initialize message writer", e);
  }
  // Pending tuples are tracked in a HashSet, so duplicates collapse and
  // iteration order is not preserved.
  this.writerComponent = new BulkWriterComponent<JSONObject>(collector, isBulk, isBulk) {
    @Override
    protected Collection<Tuple> createTupleCollection() {
      return new HashSet<>();
    }
  };
}
/**
 * Builds an indexing-specific writer configuration for the given writer.
 * Only {@code IndexingConfigurations} instances are accepted; anything else
 * is a programming error.
 *
 * @throws IllegalArgumentException if {@code configs} is not an IndexingConfigurations
 */
@Override
public WriterConfiguration createWriterConfig(BulkMessageWriter writer, Configurations configs) {
  // Guard clause: fail fast on an unexpected configuration type.
  if (!(configs instanceof IndexingConfigurations)) {
    throw new IllegalArgumentException(
        "Expected config of type IndexingConfigurations but found " + configs.getClass());
  }
  return new IndexingWriterConfiguration(writer.getName(), (IndexingConfigurations) configs);
}
/**
 * Convenience overload: commits (acks) only the tuples the bulk response
 * reported as successes, delegating to {@code commit(Iterable)}.
 *
 * @param response the bulk write outcome whose successes should be committed
 */
public void commit(BulkWriterResponse response) {
  commit(response.getSuccesses());
}
public void error(String sensorType, BulkWriterResponse errors, MessageGetStrategy messageGetStrategy) { Map<Throwable, Collection<Tuple>> errorMap = errors.getErrors(); for(Map.Entry<Throwable, Collection<Tuple>> entry : errorMap.entrySet()) { error(sensorType, entry.getKey(), entry.getValue(), messageGetStrategy); } }
@Override public BulkWriterResponse write(String sourceType , WriterConfiguration configurations , Iterable<Tuple> tuples , List<JSONObject> messages ) throws Exception { BulkWriterResponse response = new BulkWriterResponse(); // Currently treating all the messages in a group for pass/failure. try { // Messages can all result in different HDFS paths, because of Stellar Expressions, so we'll need to iterate through for(JSONObject message : messages) { String path = getHdfsPathExtension( sourceType, (String)configurations.getSensorConfig(sourceType).getOrDefault(IndexingConfigurations.OUTPUT_PATH_FUNCTION_CONF, ""), message ); SourceHandler handler = getSourceHandler(sourceType, path, configurations); handler.handle(message, sourceType, configurations, syncPolicyCreator); } } catch (Exception e) { response.addAllErrors(e, tuples); } response.addAllSuccesses(tuples); return response; }
/**
 * Captures each message as serialized JSON bytes in the output list and
 * reports every tuple as successfully written.
 */
@Override
public BulkWriterResponse write(String sensorType, WriterConfiguration configurations, Iterable<Tuple> tuples, List<JSONObject> messages) throws Exception {
  for(JSONObject message : messages) {
    output.add(message.toJSONString().getBytes());
  }
  BulkWriterResponse response = new BulkWriterResponse();
  response.addAllSuccesses(tuples);
  return response;
}
/**
 * When reading the message from the tuple throws, the tuple should still be
 * acked, the error reported, and nothing written.
 */
@Test
public void testNonBatchErrorPath() throws Exception {
  ParserConfigurations parserConfig = getConfigurations(1);
  String sensorType = "test";
  Tuple tuple = mock(Tuple.class);
  // Force the bolt's message extraction to blow up.
  when(tuple.getValueByField(eq("message"))).thenThrow(new IllegalStateException());
  WriterBolt bolt = new WriterBolt(new WriterHandler(writer), parserConfig, sensorType);
  bolt.prepare(new HashMap(), topologyContext, outputCollector);
  verify(writer, times(1)).init();
  bolt.execute(tuple);
  // The failure surfaces via reportError, but the tuple is still acked so it
  // is not replayed; the writer itself is never invoked.
  verify(outputCollector, times(1)).ack(tuple);
  verify(writer, times(0)).write(eq(sensorType), any(), any(), any());
  verify(outputCollector, times(1)).reportError(any());
  verify(outputCollector, times(0)).fail(any());
}

@Test
@Test public void shouldWriteSuccessfully() { // create a tuple and a message associated with that tuple List<Tuple> tuples = createTuples(1); List<JSONObject> messages = createMessages(1); // create a document writer which will successfully write all BulkDocumentWriterResults<TupleBasedDocument> results = new BulkDocumentWriterResults<>(); results.addSuccess(createDocument(messages.get(0), tuples.get(0))); BulkDocumentWriter<TupleBasedDocument> docWriter = mock(BulkDocumentWriter.class); when(docWriter.write()).thenReturn(results); // attempt to write ElasticsearchWriter esWriter = new ElasticsearchWriter(); esWriter.setDocumentWriter(docWriter); esWriter.init(stormConf, topologyContext, writerConfiguration); BulkWriterResponse response = esWriter.write("bro", writerConfiguration, tuples, messages); // response should only contain successes assertFalse(response.hasErrors()); assertTrue(response.getSuccesses().contains(tuples.get(0))); }
// NOTE(review): fragment -- the enclosing try block and method signature are
// not visible here; presumably part of a prepare()/init() method.
bulkMessageWriter.init(stormConf, context, writerconf);
} catch (Exception e) {
  // Initialization failures are fatal; rethrow unchecked with the cause preserved.
  throw new RuntimeException(e);
/**
 * Even when a message is missing its GUID, the write should still succeed:
 * the writer is expected to cope with absent GUIDs rather than error out.
 */
@Test
public void shouldWriteSuccessfullyWhenMissingGUID() {
  // create a tuple and a message associated with that tuple
  List<Tuple> tuples = createTuples(1);
  List<JSONObject> messages = createMessages(1);
  // remove the GUID from the message; assertNotNull doubles as a sanity check
  // that the fixture actually had one to remove
  assertNotNull(messages.get(0).remove(Constants.GUID));
  // create a document writer which will successfully write all
  BulkDocumentWriterResults<TupleBasedDocument> results = new BulkDocumentWriterResults<>();
  results.addSuccess(createDocument(messages.get(0), tuples.get(0)));
  BulkDocumentWriter<TupleBasedDocument> docWriter = mock(BulkDocumentWriter.class);
  when(docWriter.write()).thenReturn(results);
  // attempt to write
  ElasticsearchWriter esWriter = new ElasticsearchWriter();
  esWriter.setDocumentWriter(docWriter);
  esWriter.init(stormConf, topologyContext, writerConfiguration);
  BulkWriterResponse response = esWriter.write("bro", writerConfiguration, tuples, messages);
  // response should only contain successes
  assertFalse(response.hasErrors());
  assertTrue(response.getSuccesses().contains(tuples.get(0)));
}