@Override public void run() { if (!busyLock.enterBusy()) return; try { locNodeHnd.apply(finalLocNode, finalMsg); } finally { busyLock.leaveBusy(); } } }, plc).listen(logger);
/** {@inheritDoc} */
@Override public void apply(E1 e1, E2 e2) {
    // Delegate to the checked variant, converting the checked exception into its
    // unchecked wrapper so this method can satisfy the non-throwing closure contract.
    try {
        applyx(e1, e2);
    }
    catch (IgniteCheckedException ex) {
        throw F.wrap(ex);
    }
}
/**
 * Send finish response for the given job to the given destination.
 *
 * @param dest Destination.
 * @param jobId Job ID.
 */
private void sendFinishResponse(T dest, HadoopJobId jobId) {
    HadoopShuffleFinishResponse msg = new HadoopShuffleFinishResponse(jobId);

    io.apply(dest, msg);

    // Log only after the response has actually been handed to the I/O closure;
    // the original logged "Sent ..." before sending, which was misleading if
    // io.apply(...) failed.
    if (log.isDebugEnabled())
        log.debug("Sent shuffle finish response [jobId=" + jobId + ", dest=" + dest + ']');
}
/** {@inheritDoc} */
@Override public synchronized void doHandshake(IgniteInClosure2X<InputStream, OutputStream> handshakeC)
    throws IgniteCheckedException {
    // Hand the shared-memory endpoint streams to the caller-supplied handshake closure.
    InputStream in = shmem.inputStream();
    OutputStream out = shmem.outputStream();

    handshakeC.applyx(in, out);
}
/** {@inheritDoc} */
@Override protected void writeToSocket(Socket sock, OutputStream out, TcpDiscoveryAbstractMessage msg,
    long timeout) throws IOException, IgniteCheckedException {
    waitFor(writeLock);

    // Give the message interceptor a chance to veto the write entirely.
    if (onMessage(sock, msg)) {
        super.writeToSocket(sock, out, msg, timeout);

        // Optional post-write hook (e.g. test instrumentation).
        if (afterWrite != null)
            afterWrite.apply(msg, sock);
    }
}
/** {@inheritDoc} */
@Override public void apply(E1 e1, E2 e2) {
    try {
        // Forward to the exception-declaring variant of this closure.
        applyx(e1, e2);
    }
    catch (IgniteCheckedException err) {
        // Rewrap as unchecked to fit the non-throwing closure interface.
        throw F.wrap(err);
    }
}
/** {@inheritDoc} */
@Override protected void writeToSocket(Socket sock, TcpDiscoveryAbstractMessage msg, byte[] msgBytes,
    long timeout) throws IOException {
    waitFor(writeLock);

    // Interceptor may suppress this message; skip the write (and the hook) if so.
    if (onMessage(sock, msg)) {
        super.writeToSocket(sock, msg, msgBytes, timeout);

        // Optional post-write hook (e.g. test instrumentation).
        if (afterWrite != null)
            afterWrite.apply(msg, sock);
    }
}
/** {@inheritDoc} */
@Override public synchronized void doHandshake(IgniteInClosure2X<InputStream, OutputStream> handshakeC)
    throws IgniteCheckedException {
    // Run the user-supplied handshake over the raw shared-memory space streams.
    // Synchronized so concurrent handshakes over the same endpoint cannot interleave.
    handshakeC.applyx(shmem.inputStream(), shmem.outputStream());
}
/**
 * Flush remote direct context: package whatever the context has buffered into a
 * direct shuffle message and send it to the owning reducer.
 *
 * @param rmtMapIdx Remote map index.
 * @param rmtDirectCtx Remote direct context (may be {@code null} if nothing was ever buffered).
 * @param reset Whether to reset the context for reuse after capturing its state.
 */
private void sendShuffleMessage(int rmtMapIdx, @Nullable HadoopDirectDataOutputContext rmtDirectCtx,
    boolean reset) {
    // Nothing buffered for this mapper — nothing to flush.
    if (rmtDirectCtx == null)
        return;

    int recordCnt = rmtDirectCtx.count();

    if (recordCnt == 0)
        return;

    // When mappers are striped, fold the mapper index onto the reducer space.
    int rdcIdx = stripeMappers ? rmtMapIdx % totalReducerCnt : rmtMapIdx;

    // Capture the buffer state BEFORE (optionally) resetting the context for reuse.
    HadoopDirectDataOutputState bufState = rmtDirectCtx.state();

    if (reset)
        rmtDirectCtx.reset();

    HadoopDirectShuffleMessage msg = new HadoopDirectShuffleMessage(job.id(), rdcIdx, recordCnt,
        bufState.buffer(), bufState.bufferLength(), bufState.dataLength());

    T dest = reduceAddrs[rdcIdx];

    io.apply(dest, msg);

    remoteShuffleState(dest).onShuffleMessage();
}
// NOTE(review): the statements below are fragments of larger methods not visible in
// this chunk; the comments are best-effort and should be confirmed against the
// enclosing code.

// Send shuffle message to the reducer's endpoint — presumably inside a flush loop; confirm.
io.apply(reduceAddrs[rmtRdcIdx], msg);
// Deliver message to the local node via the local-node handler closure.
locNodeHnd.apply(locNode, msg);
// Send request to the given destination via the I/O closure.
io.apply(dest, req);
// Acknowledge a received shuffle message back to its sender, echoing id and job id.
io.apply(src, new HadoopShuffleAck(msg.id(), msg.jobId()));
// Apply expiry closure to the near-cache entry with the obsolete version — TODO confirm semantics.
expireC.apply(nearEntry, obsoleteVer);
// Apply closure to the cache entry with the obsolete version — TODO confirm semantics.
c.apply(entry, obsoleteVer);
c.apply(e1, obsoleteVer);
// Duplicates of the fragments above, presumably from sibling code paths.
locNodeHnd.apply(locNode, msg);
expireC.apply(nearEntry, obsoleteVer);
c.apply(entry, obsoleteVer);
c.apply(e1, obsoleteVer);