/**
 * Serializes a flow-control packet destined for {@code member}.
 * <p>
 * Layout (write order is the wire format — do not reorder):
 * <ol>
 *   <li>int: number of execution contexts involving {@code member}</li>
 *   <li>per execution: long executionId, int total (vertexId, ordinal) entry count,
 *       then per entry: int vertexId, int ordinal, int compressed send-seq limit</li>
 * </ol>
 * Returns {@code EMPTY_BYTES} when no (vertexId, ordinal) entry was written,
 * i.e. there is nothing to flow-control for this member.
 *
 * @throws IOException if writing to the output buffer fails
 */
private byte[] createFlowControlPacket(Address member) throws IOException {
    try (BufferObjectDataOutput out = createObjectDataOutput(nodeEngine)) {
        // Single-element array: lambdas below can't write a plain local.
        final boolean[] hasData = {false};
        Map<Long, ExecutionContext> executionContexts = jobExecutionService.getExecutionContextsFor(member);
        out.writeInt(executionContexts.size());
        // uncheckRun: lambdas can't throw the checked IOException from writeXxx.
        executionContexts.forEach((execId, exeCtx) -> uncheckRun(() -> {
            out.writeLong(execId);
            // Total entry count = sum of ordinal-map sizes across all receiver vertices.
            out.writeInt(exeCtx.receiverMap().values().stream().mapToInt(Map::size).sum());
            exeCtx.receiverMap().forEach((vertexId, ordinalToSenderToTasklet) ->
                ordinalToSenderToTasklet.forEach((ordinal, senderToTasklet) -> uncheckRun(() -> {
                    out.writeInt(vertexId);
                    out.writeInt(ordinal);
                    // Side effect: advances and reads the send-seq limit for this member's tasklet.
                    out.writeInt(senderToTasklet.get(member).updateAndGetSendSeqLimitCompressed());
                    hasData[0] = true;
                })));
        }));
        // No entries written -> don't send a useless packet.
        return hasData[0] ? out.toByteArray() : EMPTY_BYTES;
    }
}
/**
 * Serializes {@code joinMessage} and broadcasts it over the multicast socket.
 * <p>
 * Writes into the shared {@code sendOutput} buffer under {@code sendLock};
 * the buffer is cleared in a {@code finally} block so a failed write cannot
 * leave stale bytes that would corrupt the next send. No-op when the service
 * is not running.
 *
 * @param joinMessage the join message to broadcast
 */
public void send(JoinMessage joinMessage) {
    if (!running) {
        return;
    }
    final BufferObjectDataOutput out = sendOutput;
    synchronized (sendLock) {
        try {
            out.writeByte(Packet.VERSION);
            out.writeObject(joinMessage);
            // Optional hook, e.g. for encrypting/compressing the datagram payload.
            byte[] processed = outputProcessor != null ? outputProcessor.process(out.toByteArray()) : out.toByteArray();
            datagramPacketSend.setData(processed);
            multicastSocket.send(datagramPacketSend);
        } catch (IOException e) {
            logger.warning("You probably have too long Hazelcast configuration!", e);
        } finally {
            // Must run even on failure: the buffer is reused by every send() call,
            // and leftover partial bytes would be prepended to the next datagram.
            out.clear();
        }
    }
}
}
/**
 * Builds the header bytes of a stream packet: executionId (long),
 * destination vertex id (int), ordinal (int), in that order.
 *
 * @param nodeEngine           node engine used to obtain the output buffer
 * @param executionId          id of the target execution
 * @param destinationVertexId  id of the vertex the stream is addressed to
 * @param ordinal              input ordinal on the destination vertex
 * @return the serialized header
 */
public static byte[] createStreamPacketHeader(NodeEngine nodeEngine, long executionId, int destinationVertexId, int ordinal) {
    try (BufferObjectDataOutput header = createObjectDataOutput(nodeEngine)) {
        header.writeLong(executionId);
        header.writeInt(destinationVertexId);
        header.writeInt(ordinal);
        byte[] bytes = header.toByteArray();
        return bytes;
    } catch (IOException ioe) {
        // Header serialization is in-memory only; an IOException here is unexpected,
        // so rethrow it unchecked.
        throw sneakyThrow(ioe);
    }
}
public void send(JoinMessage joinMessage) { if (!running) { return; } final BufferObjectDataOutput out = sendOutput; synchronized (sendLock) { try { out.writeByte(Packet.VERSION); out.writeObject(joinMessage); byte[] processed = outputProcessor != null ? outputProcessor.process(out.toByteArray()) : out.toByteArray(); datagramPacketSend.setData(processed); multicastSocket.send(datagramPacketSend); out.clear(); } catch (IOException e) { // usually catching EPERM errno // see https://github.com/hazelcast/hazelcast/issues/7198 // For details about the causes look at the following discussion: // https://groups.google.com/forum/#!msg/comp.protocols.tcp-ip/Qou9Sfgr77E/mVQAPaeI-VUJ logger.warning("Sending multicast datagram failed. Exception message saying the operation is not permitted " + "usually means the underlying OS is not able to send packets at a given pace. " + "It can be caused by starting several hazelcast members in parallel when the members send " + "their join message nearly at the same time.", e); } } } }
/**
 * Runs one step of this tasklet: drains the inbox, and if the tasklet is not
 * yet done, tries to fill the output buffer and ship it as a JET packet.
 *
 * @return the progress made during this step
 */
@Nonnull @Override
public ProgressState call() {
    progTracker.reset();
    tryFillInbox();
    if (!progTracker.isDone()) {
        boolean bufferFilled = tryFillOutputBuffer();
        if (bufferFilled) {
            progTracker.madeProgress();
            Packet packet = new Packet(outputBuffer.toByteArray()).setPacketType(Packet.Type.JET);
            connection.write(packet);
        }
    }
    return progTracker.toProgressState();
}
/**
 * Serializes {@code obj} into a byte array using a pooled output buffer.
 * <p>
 * Layout: {@code leftPadding} reserved bytes, then (optionally) the
 * big-endian partition hash, then the serializer type id in
 * {@code serializerTypeIdByteOrder}, then the payload.
 *
 * @param obj                       object to serialize (must not be null)
 * @param leftPadding               number of bytes to skip at the front
 * @param writeHash                 whether to prepend the partition hash
 * @param strategy                  partitioning strategy for the hash
 * @param serializerTypeIdByteOrder byte order for the serializer type id (must not be null)
 * @return the serialized bytes
 */
private byte[] toBytes(Object obj, int leftPadding, boolean writeHash, PartitioningStrategy strategy, ByteOrder serializerTypeIdByteOrder) {
    checkNotNull(obj);
    checkNotNull(serializerTypeIdByteOrder);
    BufferPool bufferPool = bufferPoolThreadLocal.get();
    BufferObjectDataOutput output = bufferPool.takeOutputBuffer();
    try {
        output.position(leftPadding);
        SerializerAdapter adapter = serializerFor(obj);
        if (writeHash) {
            // Partition hash is always big-endian, regardless of the type-id order.
            output.writeInt(calculatePartitionHash(obj, strategy), BIG_ENDIAN);
        }
        output.writeInt(adapter.getTypeId(), serializerTypeIdByteOrder);
        adapter.write(output, obj);
        return output.toByteArray();
    } catch (Throwable t) {
        throw handleSerializeException(obj, t);
    } finally {
        // Always hand the pooled buffer back, even on failure.
        bufferPool.returnOutputBuffer(output);
    }
}
/**
 * Serializes {@code obj} to bytes via a thread-local pooled buffer.
 * <p>
 * Written in order: {@code leftPadding} reserved bytes, an optional
 * big-endian partition hash, the serializer type id in the requested
 * byte order, and finally the object payload.
 *
 * @param obj                       non-null object to serialize
 * @param leftPadding               bytes reserved at the start of the output
 * @param writeHash                 if true, write the partition hash first
 * @param strategy                  strategy used to compute the partition hash
 * @param serializerTypeIdByteOrder non-null byte order for the type id
 * @return the resulting byte array
 */
private byte[] toBytes(Object obj, int leftPadding, boolean writeHash, PartitioningStrategy strategy, ByteOrder serializerTypeIdByteOrder) {
    checkNotNull(obj);
    checkNotNull(serializerTypeIdByteOrder);
    final BufferPool pool = bufferPoolThreadLocal.get();
    final BufferObjectDataOutput dataOut = pool.takeOutputBuffer();
    try {
        dataOut.position(leftPadding);
        final SerializerAdapter chosen = serializerFor(obj);
        if (writeHash) {
            int hash = calculatePartitionHash(obj, strategy);
            // The hash is fixed big-endian; only the type id honors the caller's order.
            dataOut.writeInt(hash, BIG_ENDIAN);
        }
        dataOut.writeInt(chosen.getTypeId(), serializerTypeIdByteOrder);
        chosen.write(dataOut, obj);
        return dataOut.toByteArray();
    } catch (Throwable failure) {
        throw handleSerializeException(obj, failure);
    } finally {
        // Return the pooled buffer on every path to avoid pool exhaustion.
        pool.returnOutputBuffer(dataOut);
    }
}