/**
 * Increments the per-task emitted-tuple counters for the tracked tuple-tree id.
 *
 * @param id    tracking id of the tuple tree
 * @param tasks downstream task ids that tuples were just emitted to
 */
private void updateTaskCounts(Object id, List<Integer> tasks) {
    // _tracked is shared with ack/fail handlers; guard all reads/writes.
    synchronized (_tracked) {
        TrackingInfo track = _tracked.get(id);
        // Tracking info may already be gone (e.g. expired or finished); then this is a no-op.
        if (track != null) {
            Map<Integer, Integer> taskEmittedTuples = track.taskEmittedTuples;
            for (Integer task : tasks) {
                // Utils.get returns the supplied default (0) when the task has no entry yet.
                int newCount = Utils.get(taskEmittedTuples, task, 0) + 1;
                taskEmittedTuples.put(task, newCount);
            }
        }
    }
}
// NOTE(review): this trailing brace appears to close an enclosing class that is outside this chunk.
}
/**
 * Writes one chunk of an in-progress blob upload to the session's output stream.
 *
 * @param session upload session id previously returned when the upload began
 * @param chunk   data to append; must be backed by an accessible array
 * @throws TException if the underlying failure was already a thrift exception
 * @throws RuntimeException if the session is unknown/expired or the write fails
 */
@SuppressWarnings("deprecation")
@Override
public void uploadBlobChunk(String session, ByteBuffer chunk) throws AuthorizationException, TException {
    try {
        OutputStream out = blobUploaders.get(session);
        if (out == null) {
            throw new RuntimeException("Blob for session " + session + " does not exist (or timed out)");
        }
        // Write exactly the remaining bytes, starting at the buffer's current position
        // within its backing array.
        byte[] backing = chunk.array();
        int start = chunk.arrayOffset() + chunk.position();
        out.write(backing, start, chunk.remaining());
        // Re-insert the stream; presumably blobUploaders is a time-based cache and this
        // refreshes the session's expiry — TODO confirm against the map implementation.
        blobUploaders.put(session, out);
    } catch (Exception e) {
        LOG.warn("upload blob chunk exception.", e);
        if (e instanceof TException) {
            throw (TException) e;
        }
        throw new RuntimeException(e);
    }
}
/**
 * Completes a blob upload: closes the session's output stream and removes the session.
 *
 * @param session upload session id to finish
 * @throws TException if the underlying failure was already a thrift exception
 * @throws RuntimeException if the session is unknown/expired or closing fails
 */
@SuppressWarnings("deprecation")
@Override
public void finishBlobUpload(String session) throws AuthorizationException, TException {
    try {
        OutputStream out = blobUploaders.get(session);
        if (out == null) {
            throw new RuntimeException("Blob for session " + session + " does not exist (or timed out)");
        }
        // Closing the stream finalizes the blob contents.
        out.close();
        LOG.info("Finished uploading blob for session {}. Closing session.", session);
        blobUploaders.remove(session);
    } catch (Exception e) {
        LOG.warn("finish blob upload exception.", e);
        if (e instanceof TException) {
            throw (TException) e;
        }
        throw new RuntimeException(e);
    }
}
/**
 * Appends one chunk to an in-progress file upload at the given location.
 *
 * @param location upload location key previously returned when the upload began
 * @param chunk    data to write
 * @throws TException if the underlying failure was already a thrift exception
 * @throws RuntimeException if the location is unknown/expired or the write fails
 */
@SuppressWarnings("deprecation")
@Override
public void uploadChunk(String location, ByteBuffer chunk) throws AuthorizationException, TException {
    try {
        uploadChunkCalls.mark();
        checkAuthorization(null, null, "fileUpload");
        WritableByteChannel sink = uploaders.get(location);
        if (sink == null) {
            throw new RuntimeException("File for that location does not exist (or timed out)");
        }
        sink.write(chunk);
        // Re-insert the channel; presumably uploaders is a time-based cache and this
        // refreshes the entry's expiry — TODO confirm against the map implementation.
        uploaders.put(location, sink);
    } catch (Exception e) {
        LOG.warn("uploadChunk exception.", e);
        if (e instanceof TException) {
            throw (TException) e;
        }
        throw new RuntimeException(e);
    }
}
/**
 * Cancels an in-progress blob upload: aborts the atomic stream so no partial
 * data is committed, then removes the session.
 *
 * @param session upload session id to cancel
 * @throws TException if the underlying failure was already a thrift exception
 * @throws RuntimeException if the session is unknown/expired or cancellation fails
 */
@SuppressWarnings("deprecation")
@Override
public void cancelBlobUpload(String session) throws AuthorizationException, TException {
    try {
        AtomicOutputStream os = (AtomicOutputStream) blobUploaders.get(session);
        if (os == null) {
            throw new RuntimeException("Blob for session " + session + " does not exist (or timed out)");
        }
        // cancel() discards the partially-written data instead of committing it.
        os.cancel();
        LOG.info("Canceled uploading blob for session {}. Closing session.", session);
        blobUploaders.remove(session);
    } catch (Exception e) {
        // Fixed copy-paste bug: this is the cancel path, not the finish path.
        LOG.warn("cancel blob upload exception.", e);
        if (e instanceof TException) {
            throw (TException) e;
        }
        throw new RuntimeException(e);
    }
}
/**
 * Completes a file upload: closes the channel for the location and removes it.
 *
 * @param location upload location key to finish
 * @throws TException if the underlying failure was already a thrift exception
 * @throws RuntimeException if the location is unknown/expired or closing fails
 */
@SuppressWarnings("deprecation")
@Override
public void finishFileUpload(String location) throws AuthorizationException, TException {
    try {
        finishFileUploadCalls.mark();
        checkAuthorization(null, null, "fileUpload");
        WritableByteChannel sink = uploaders.get(location);
        if (sink == null) {
            throw new RuntimeException("File for that location does not exist (or timed out)");
        }
        sink.close();
        LOG.info("Finished uploading file from client: {}", location);
        uploaders.remove(location);
    } catch (Exception e) {
        LOG.warn("finish file upload exception.", e);
        if (e instanceof TException) {
            throw (TException) e;
        }
        throw new RuntimeException(e);
    }
}
/**
 * Records a failure for the tuple's tracked tree, re-checks whether the tree
 * is finished, and propagates the fail to the delegate acker.
 *
 * @param tuple failed tuple; its first value is the tracking id
 */
public void fail(Tuple tuple) {
    Object trackingId = tuple.getValue(0);
    synchronized (_tracked) {
        TrackingInfo info = _tracked.get(trackingId);
        if (info != null) {
            info.failed = true;
        }
    }
    checkFinishId(tuple, TupleType.REGULAR);
    _delegate.fail(tuple);
}
/**
 * Returns the next chunk of a file download; an empty chunk signals end-of-stream,
 * at which point the stream is closed and the download entry removed.
 *
 * @param id download id previously returned when the download began
 * @return the next chunk of bytes; empty when the download is complete
 * @throws TException if the underlying failure was already a thrift exception
 * @throws RuntimeException if the id is unknown or reading fails
 */
@SuppressWarnings("deprecation")
@Override
public ByteBuffer downloadChunk(String id) throws AuthorizationException, TException {
    try {
        downloadChunkCalls.mark();
        checkAuthorization(null, null, "fileDownload");
        BufferInputStream is = downloaders.get(id);
        if (is == null) {
            throw new RuntimeException("Could not find input stream for id " + id);
        }
        byte[] ret = is.read();
        if (ret.length == 0) {
            is.close();
            downloaders.remove(id);
        } else {
            // Consistency fix: re-insert the stream on partial reads, matching
            // downloadBlobChunk, so a time-based cache entry is refreshed and a
            // long download does not expire mid-transfer — NOTE(review): confirm
            // downloaders is a time-based cache like blobDownloaders.
            downloaders.put(id, is);
        }
        return ByteBuffer.wrap(ret);
    } catch (Exception e) {
        LOG.warn("download chunk exception.", e);
        if (e instanceof TException) {
            throw (TException) e;
        }
        throw new RuntimeException(e);
    }
}
/**
 * Returns the next chunk of a blob download; an empty chunk signals end-of-stream,
 * at which point the stream is closed and the session removed. On partial reads
 * the stream is re-inserted to refresh its cache entry.
 *
 * @param session download session id
 * @return the next chunk of bytes; empty when the download is complete
 * @throws TException if the underlying failure was already a thrift exception
 * @throws RuntimeException if the session is unknown/expired or reading fails
 */
@SuppressWarnings("deprecation")
@Override
public ByteBuffer downloadBlobChunk(String session) throws AuthorizationException, TException {
    try {
        BufferInputStream in = blobDownloaders.get(session);
        if (in == null) {
            throw new RuntimeException("Blob for session " + session + " does not exist (or timed out)");
        }
        byte[] chunk = in.read();
        boolean exhausted = (chunk.length == 0);
        if (exhausted) {
            in.close();
            blobDownloaders.remove(session);
        } else {
            // Re-put to refresh the session's cache entry while the download continues.
            blobDownloaders.put(session, in);
        }
        LOG.debug("Sending {} bytes", chunk.length);
        return ByteBuffer.wrap(chunk);
    } catch (Exception e) {
        LOG.warn("download blob chunk exception.", e);
        if (e instanceof TException) {
            throw (TException) e;
        }
        throw new RuntimeException(e);
    }
}
/**
 * Counts a received tuple against its tracked tree, then either fails or acks
 * it on the delegate depending on whether the tree has already failed.
 *
 * @param tuple acked tuple; its first value is the tracking id
 */
public void ack(Tuple tuple) {
    Object trackingId = tuple.getValue(0);
    synchronized (_tracked) {
        TrackingInfo info = _tracked.get(trackingId);
        if (info != null) {
            info.receivedTuples++;
        }
    }
    // checkFinishId reports whether the tree has failed; route accordingly.
    boolean treeFailed = checkFinishId(tuple, TupleType.REGULAR);
    if (treeFailed) {
        _delegate.fail(tuple);
    } else {
        _delegate.ack(tuple);
    }
}
session = Utils.uuid(); } else { keyIt = blobListers.get(session);
/**
 * Buffers one side of a join keyed by the tuple's id fields; when tuples from
 * all source streams have arrived for an id, emits the joined result anchored
 * on all parts and acks each buffered tuple.
 *
 * @param tuple incoming tuple from one of the join's source streams
 * @throws RuntimeException if the same stream delivers the same id twice
 */
@Override
public void execute(Tuple tuple) {
    List<Object> joinKey = tuple.select(_idFields);
    GlobalStreamId source =
        new GlobalStreamId(tuple.getSourceComponent(), tuple.getSourceStreamId());

    if (!_pending.containsKey(joinKey)) {
        _pending.put(joinKey, new HashMap<GlobalStreamId, Tuple>());
    }
    Map<GlobalStreamId, Tuple> buffered = _pending.get(joinKey);
    if (buffered.containsKey(source)) {
        throw new RuntimeException("Received same side of single join twice");
    }
    buffered.put(source, tuple);

    // All sides present: assemble output in _outFields order and flush.
    if (buffered.size() == _numSources) {
        _pending.remove(joinKey);
        List<Object> joined = new ArrayList<Object>();
        for (String field : _outFields) {
            GlobalStreamId origin = _fieldLocations.get(field);
            joined.add(buffered.get(origin).getValueByField(field));
        }
        // Anchor the emit on every contributing tuple, then ack them all.
        _collector.emit(new ArrayList<Tuple>(buffered.values()), joined);
        for (Tuple side : buffered.values()) {
            _collector.ack(side);
        }
    }
}
TrackingInfo track = _tracked.get(id); try { if (track != null) {
TupleType type = getTupleType(tuple); synchronized (_tracked) { track = _tracked.get(id); if (track == null) { track = new TrackingInfo();
/**
 * Increments the per-task emitted-tuple counters for the tracked tuple-tree id.
 *
 * @param id    tracking id of the tuple tree
 * @param tasks downstream task ids that tuples were just emitted to
 */
private void updateTaskCounts(Object id, List<Integer> tasks) {
    // _tracked is shared with ack/fail handlers; guard all reads/writes.
    synchronized(_tracked) {
        TrackingInfo track = _tracked.get(id);
        // Tracking info may already be gone (e.g. expired or finished); then this is a no-op.
        if (track != null) {
            Map<Integer, Integer> taskEmittedTuples = track.taskEmittedTuples;
            for(Integer task: tasks) {
                // get returns the supplied default (0) when the task has no entry yet.
                int newCount = get(taskEmittedTuples, task, 0) + 1;
                taskEmittedTuples.put(task, newCount);
            }
        }
    }
}
// NOTE(review): this trailing brace appears to close an enclosing class that is outside this chunk.
}
/**
 * Records a failure for the tuple's tracked tree, re-checks whether the tree
 * is finished, and propagates the fail to the delegate acker.
 *
 * @param tuple failed tuple; its first value is the tracking id
 */
public void fail(Tuple tuple) {
    Object id = tuple.getValue(0);
    synchronized (_tracked) {
        TrackingInfo track = _tracked.get(id);
        // Braced single-statement if (idiom fix): tracking info may already be gone.
        if (track != null) {
            track.failed = true;
        }
    }
    checkFinishId(tuple, TupleType.REGULAR);
    _delegate.fail(tuple);
}
/**
 * Returns the set of groups for a user, consulting a cache first and falling
 * back to a Unix group lookup. Only non-empty results are cached.
 *
 * @param user get groups for this user
 * @return set of groups for the given user (possibly empty, never null)
 * @throws IOException if the underlying group lookup fails
 */
@Override
public Set<String> getGroups(String user) throws IOException {
    // Single lookup instead of containsKey+get: avoids a double map access and a
    // check-then-act race if the cache can expire entries between the two calls.
    Set<String> cached = cachedGroups.get(user);
    if (cached != null) {
        return cached;
    }
    Set<String> groups = getUnixGroups(user);
    // Deliberately skip caching empty results so a transient lookup failure
    // or not-yet-provisioned user is retried on the next call.
    if (!groups.isEmpty()) {
        cachedGroups.put(user, groups);
    }
    return groups;
}
/**
 * Counts a received tuple against its tracked tree, then either fails or acks
 * it on the delegate depending on whether the tree has already failed.
 *
 * @param tuple acked tuple; its first value is the tracking id
 */
public void ack(Tuple tuple) {
    Object id = tuple.getValue(0);
    synchronized (_tracked) {
        TrackingInfo track = _tracked.get(id);
        // Braced single-statement if (idiom fix): tracking info may already be gone.
        if (track != null) {
            track.receivedTuples++;
        }
    }
    // checkFinishId reports whether the tree has failed; route accordingly.
    boolean failed = checkFinishId(tuple, TupleType.REGULAR);
    if (failed) {
        _delegate.fail(tuple);
    } else {
        _delegate.ack(tuple);
    }
}
/**
 * Buffers one side of a join keyed by the tuple's id fields; when tuples from
 * all source streams have arrived for an id, emits the joined result anchored
 * on all parts and acks each buffered tuple.
 *
 * @param tuple incoming tuple from one of the join's source streams
 * @throws RuntimeException if the same stream delivers the same id twice
 */
@Override
public void execute(Tuple tuple) {
    List<Object> id = tuple.select(_idFields);
    GlobalStreamId streamId =
        new GlobalStreamId(tuple.getSourceComponent(), tuple.getSourceStreamId());
    if (!_pending.containsKey(id)) {
        _pending.put(id, new HashMap<GlobalStreamId, Tuple>());
    }
    Map<GlobalStreamId, Tuple> parts = _pending.get(id);
    // Braced single-statement throw (idiom fix): duplicate delivery is a hard error.
    if (parts.containsKey(streamId)) {
        throw new RuntimeException("Received same side of single join twice");
    }
    parts.put(streamId, tuple);
    // All sides present: assemble output in _outFields order and flush.
    if (parts.size() == _numSources) {
        _pending.remove(id);
        List<Object> joinResult = new ArrayList<Object>();
        for (String outField : _outFields) {
            GlobalStreamId loc = _fieldLocations.get(outField);
            joinResult.add(parts.get(loc).getValueByField(outField));
        }
        // Anchor the emit on every contributing tuple, then ack them all.
        _collector.emit(new ArrayList<Tuple>(parts.values()), joinResult);
        for (Tuple part : parts.values()) {
            _collector.ack(part);
        }
    }
}
TupleType type = getTupleType(tuple); synchronized(_tracked) { track = _tracked.get(id); if(track==null) { track = new TrackingInfo();