class BlockingQueue {
  private final Queue<String> buffer = new LinkedList<String>();

  // wait()/notify() require holding this object's monitor, so both methods
  // must be synchronized (otherwise they throw IllegalMonitorStateException).
  public synchronized void give(String data) {
    buffer.add(data);
    notify(); // Since someone may be waiting in take!
  }

  public synchronized String take() throws InterruptedException {
    while (buffer.isEmpty()) { // don't use "if" due to spurious wakeups.
      wait();
    }
    return buffer.remove();
  }
}
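// A minimal producer/consumer sketch for the guarded-wait queue above; the
// class and message names here are illustrative, not from the original source.
public class BlockingQueueDemo {
  public static void main(String[] args) throws InterruptedException {
    BlockingQueue queue = new BlockingQueue();
    Thread consumer = new Thread(() -> {
      try {
        System.out.println("took: " + queue.take()); // blocks until give() runs
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
      }
    });
    consumer.start();
    queue.give("hello"); // wakes the waiting consumer via notify()
    consumer.join();
  }
}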
/**
 * Given a set of features, add to it all the features directly or indirectly implied by any of
 * them, and return it.
 *
 * @param features the set of features to expand
 * @return the same set of features, expanded with all implied features
 */
public static Set<Feature<?>> addImpliedFeatures(Set<Feature<?>> features) {
  Queue<Feature<?>> queue = new ArrayDeque<>(features);
  while (!queue.isEmpty()) {
    Feature<?> feature = queue.remove();
    for (Feature<?> implied : feature.getImpliedFeatures()) {
      if (features.add(implied)) {
        queue.add(implied);
      }
    }
  }
  return features;
}
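// Toy demonstration of the same breadth-first closure idiom, with hypothetical
// string "features" standing in for Feature<?>: A implies B, and B implies C.
import java.util.*;

public class ImpliedClosureDemo {
  public static void main(String[] args) {
    Map<String, Set<String>> implies = Map.of("A", Set.of("B"), "B", Set.of("C"));
    Set<String> features = new LinkedHashSet<>(Set.of("A"));
    Queue<String> queue = new ArrayDeque<>(features);
    while (!queue.isEmpty()) {
      for (String implied : implies.getOrDefault(queue.remove(), Set.of())) {
        if (features.add(implied)) { // first time seen -> keep expanding
          queue.add(implied);
        }
      }
    }
    System.out.println(features); // [A, B, C]
  }
}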
/**
 * Deletes the given directory and everything inside it.
 *
 * @param directory path of the directory to delete
 */
public static void deleteAllFile(String directory) {
  List<File> fileList = new ArrayList<File>();
  File directoryFile = new File(directory);
  Queue<File> queue = new ConcurrentLinkedQueue<File>();
  queue.add(directoryFile);
  // Breadth-first traversal: collect every file and directory under the root.
  while (!queue.isEmpty()) {
    File file = queue.poll();
    if (file.isDirectory()) {
      File[] fileArray = file.listFiles();
      if (fileArray != null) {
        queue.addAll(Arrays.asList(fileArray));
      }
    }
    fileList.add(file);
  }
  // Delete in reverse BFS order so children are removed before their parent
  // directories (File.delete() fails on non-empty directories).
  for (int i = fileList.size() - 1; i >= 0; i--) {
    fileList.get(i).delete();
  }
}
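// Hypothetical usage, assuming this main lives in the same class as
// deleteAllFile above; the fresh temp directory keeps the demo harmless.
public static void main(String[] args) throws Exception {
  File root = java.nio.file.Files.createTempDirectory("demo").toFile();
  File sub = new File(root, "sub");
  sub.mkdir();
  new File(sub, "a.txt").createNewFile();
  deleteAllFile(root.getPath()); // "a.txt" goes first, then "sub", then root
  System.out.println(root.exists()); // prints false
}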
/**
 * Gets a list of the unfinished {@link Allocation}s in the order in which those {@link
 * Allocation}s were encountered. This can be used to display, for example, currently executing
 * tasks. The order helps to keep the displayed tasks in a deterministic order (new subtasks
 * appear below older ones) and not jumbled together in some random order.
 *
 * @return a list of unfinished {@link Allocation}s
 */
ImmutableList<Allocation> getUnfinishedAllocations() {
  Queue<InsertionOrderUnits> unfinishedInsertionOrderUnits = new PriorityQueue<>();
  for (InsertionOrderUnits insertionOrderUnits : completionMap.values()) {
    if (insertionOrderUnits.units.get() < insertionOrderUnits.allocation.getAllocationUnits()) {
      unfinishedInsertionOrderUnits.add(insertionOrderUnits);
    }
  }
  ImmutableList.Builder<Allocation> unfinishedAllocations =
      ImmutableList.builderWithExpectedSize(unfinishedInsertionOrderUnits.size());
  while (!unfinishedInsertionOrderUnits.isEmpty()) {
    unfinishedAllocations.add(unfinishedInsertionOrderUnits.remove().allocation);
  }
  return unfinishedAllocations.build();
}
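// The PriorityQueue above is drained with remove() rather than iterated
// because only poll()/remove() honor heap order; a PriorityQueue's iteration
// order is unspecified. A minimal illustration of the idiom (values invented):
Queue<Integer> heap = new PriorityQueue<>();
heap.add(3);
heap.add(1);
heap.add(2);
while (!heap.isEmpty()) {
  System.out.print(heap.remove() + " "); // prints "1 2 3", smallest first
}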
public Queue<PipelineConfigQueueEntry> buildQueue() {
  Queue<PipelineConfigQueueEntry> configQueue = new LinkedList<>();
  Queue<PipelineConfigDependencyEntry> tmp = new LinkedList<>();
  tmp.add(new PipelineConfigDependencyEntry(this, new ArrayList<>()));
  // Breadth-first walk of the upstream dependency graph, carrying the path
  // from the root along with each node.
  while (true) {
    PipelineConfigDependencyEntry currentHead = tmp.poll();
    if (currentHead == null) {
      break;
    }
    PipelineConfigDependencyGraph current = currentHead.getNode();
    List<PipelineConfig> currentPath = currentHead.getPath();
    currentPath.add(current.getCurrent());
    configQueue.add(new PipelineConfigQueueEntry(current.getCurrent(), new ArrayList<>(currentPath)));
    for (PipelineConfigDependencyGraph upstream : current.getUpstreamDependencies()) {
      List<PipelineConfig> parentsPath = new ArrayList<>(currentPath);
      tmp.add(new PipelineConfigDependencyEntry(upstream, parentsPath));
    }
  }
  return removeHead(configQueue);
}
@Override
protected void decode(ChannelHandlerContext ctx, Object msg, List<Object> out) throws Exception {
  if (msg instanceof HttpMessage) {
    boolean contains = ((HttpMessage) msg).headers().contains(SpdyHttpHeaders.Names.STREAM_ID);
    if (!contains) {
      ids.add(NO_ID);
    } else {
      ids.add(((HttpMessage) msg).headers().getInt(SpdyHttpHeaders.Names.STREAM_ID));
    }
  } else if (msg instanceof SpdyRstStreamFrame) {
    // The stream was reset, so no response will ever use this id.
    ids.remove(((SpdyRstStreamFrame) msg).streamId());
  }
  out.add(ReferenceCountUtil.retain(msg));
}
public <OUT> OUT findNodePattern(Function<NodePattern<T>, OUT> filter) {
  Queue<State> todo = new LinkedList<>();
  Set<State> seen = new HashSet<>();
  todo.add(root);
  seen.add(root);
  while (!todo.isEmpty()) {
    State state = todo.poll();
    if (state instanceof NodePatternState) {
      NodePattern<T> pattern = ((NodePatternState) state).pattern;
      OUT res = filter.apply(pattern);
      if (res != null) {
        return res;
      }
    }
    if (state.next != null) {
      for (State s : state.next) {
        if (!seen.contains(s)) {
          seen.add(s);
          todo.add(s);
        }
      }
    }
  }
  return null;
}
/**
 * Drains the recency queue, updating eviction metadata that the entries therein were read in
 * the specified relative order. This currently amounts to adding them to relevant eviction
 * lists (accounting for the fact that they could have been removed from the map since being
 * added to the recency queue).
 */
@GuardedBy("this")
void drainRecencyQueue() {
  ReferenceEntry<K, V> e;
  while ((e = recencyQueue.poll()) != null) {
    // An entry may be in the recency queue despite it being removed from the map. This can
    // occur when the entry was concurrently read while a writer is removing it from the
    // segment or after a clear has removed all of the segment's entries.
    if (accessQueue.contains(e)) {
      accessQueue.add(e); // Re-adding moves the entry to the tail of the access queue.
    }
  }
}
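// Rough sketch of the read-buffering idea, with invented names: reads append
// to a cheap concurrent queue, and a later locked drain replays them into an
// LRU-ordered structure in one batch.
import java.util.*;
import java.util.concurrent.ConcurrentLinkedQueue;

class RecencySketch<K> {
  private final Queue<K> recencyQueue = new ConcurrentLinkedQueue<>();
  private final LinkedHashSet<K> accessOrder = new LinkedHashSet<>();

  void recordRead(K key) { // cheap and lock-free; called on every read
    recencyQueue.add(key);
  }

  synchronized void drain() { // batched under the lock, called occasionally
    K key;
    while ((key = recencyQueue.poll()) != null) {
      if (accessOrder.remove(key)) { // still present: move to most-recent position
        accessOrder.add(key);
      }
    }
  }
}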
@Override
public void handle(Message<T> message) {
  Handler<Message<T>> theHandler;
  ContextInternal ctx;
  synchronized (this) {
    if (demand == 0L) {
      if (pending.size() < maxBufferedMessages) {
        pending.add(message);
      } else {
        if (discardHandler != null) {
          discardHandler.handle(message);
        } else {
          log.warn("Discarding message as more than " + maxBufferedMessages
              + " buffered in paused consumer. address: " + address);
        }
      }
      return;
    } else {
      if (pending.size() > 0) {
        // Preserve FIFO order: buffer the new message and deliver the oldest one.
        pending.add(message);
        message = pending.poll();
      }
      if (demand != Long.MAX_VALUE) {
        demand--;
      }
      theHandler = handler;
    }
    ctx = handlerContext;
  }
  deliver(theHandler, message, ctx);
}
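// Stripped-down, single-threaded sketch of the demand/pending interplay above
// (names invented): messages buffer while demand is zero, and fetch(n)
// releases buffered messages in FIFO order.
import java.util.*;

class DemandQueueSketch {
  private final Queue<String> pending = new LinkedList<>();
  private long demand = 0;

  void handle(String message) {
    if (demand == 0) {
      pending.add(message); // paused: buffer the message
      return;
    }
    if (!pending.isEmpty()) { // preserve FIFO: deliver oldest, buffer newest
      pending.add(message);
      message = pending.poll();
    }
    demand--;
    System.out.println("deliver " + message);
  }

  void fetch(long n) {
    demand += n;
    while (demand > 0 && !pending.isEmpty()) {
      demand--;
      System.out.println("deliver " + pending.poll());
    }
  }
}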
public synchronized ListenableFuture<?> offer(T element) {
  requireNonNull(element);
  if (finishing && borrowerCount == 0) {
    // Queue is shutting down; drop the element and report "not full".
    return immediateFuture(null);
  }
  elements.add(element);
  int newSize = elements.size();
  if (newSize == 1) {
    // 0 -> 1 transition: wake anyone waiting on "not empty", then arm a fresh signal.
    completeAsync(executor, notEmptySignal);
    notEmptySignal = SettableFuture.create();
  }
  if (newSize >= targetQueueSize) {
    return notFullSignal; // caller should wait on this future before offering more
  }
  return immediateFuture(null);
}
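// Compact illustration of the "signal future" handoff used above, built on
// Guava's SettableFuture (thread and variable names invented):
import com.google.common.util.concurrent.SettableFuture;

public class SignalDemo {
  public static void main(String[] args) throws Exception {
    SettableFuture<Void> notEmpty = SettableFuture.create();
    Thread consumer = new Thread(() -> {
      try {
        notEmpty.get(); // parks until the producer completes the future
        System.out.println("queue became non-empty");
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    });
    consumer.start();
    notEmpty.set(null); // producer side: the 0 -> 1 transition fires the signal
    consumer.join();
  }
}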
public void testHoldsLockOnAllOperations() {
  create().element();
  create().offer("foo");
  create().peek();
  create().poll();
  create().remove();
  create().add("foo");
  create().addAll(ImmutableList.of("foo"));
  create().clear();
  create().contains("foo");
  create().containsAll(ImmutableList.of("foo"));
  create().equals(new ArrayDeque<>(ImmutableList.of("foo")));
  create().hashCode();
  create().isEmpty();
  create().iterator();
  create().remove("foo");
  create().removeAll(ImmutableList.of("foo"));
  create().retainAll(ImmutableList.of("foo"));
  create().size();
  create().toArray();
  create().toArray(new String[] {"foo"});
}
assertTrue(map.isLive(entry, ticker.read()));
segment.writeQueue.add(entry);
assertSame(value, map.get(key));
assertSame(entry, segment.writeQueue.peek());
assertEquals(1, segment.writeQueue.size());
// The read is repeated deliberately: reads must leave the write queue untouched.
assertSame(value, map.get(key));
assertSame(entry, segment.writeQueue.peek());
assertEquals(1, segment.writeQueue.size());
assertSame(value, map.get(key));
assertSame(entry, segment.writeQueue.peek());
assertEquals(1, segment.writeQueue.size());
segment.expireEntries(ticker.read());
assertNull(map.get(key));
assertTrue(segment.writeQueue.isEmpty());
public boolean tryTransferRemote(AddressedTuple addressedTuple, Queue<AddressedTuple> pendingEmits,
    ITupleSerializer serializer) {
  if (pendingEmits != null && !pendingEmits.isEmpty()) {
    // Earlier emits are still queued; append instead of overtaking them.
    pendingEmits.add(addressedTuple);
    return false;
  }
  if (!remoteBackPressureStatus[addressedTuple.dest].get()) {
    TaskMessage tm = new TaskMessage(addressedTuple.getDest(),
        serializer.serialize(addressedTuple.getTuple()));
    if (transferQueue.tryPublish(tm)) {
      return true;
    }
  } else {
    LOG.debug("Noticed Back Pressure in remote task {}", addressedTuple.dest);
  }
  if (pendingEmits != null) {
    pendingEmits.add(addressedTuple);
  }
  return false;
}
/**
 * Registers the type variables for the given type and all of its superclasses and
 * superinterfaces.
 */
protected void registerAllTypeVariables(Type classType) {
  Queue<Type> typesToRegister = new LinkedList<Type>();
  Set<Type> registeredTypes = new HashSet<Type>();
  typesToRegister.add(classType);
  while (!typesToRegister.isEmpty()) {
    Type typeToRegister = typesToRegister.poll();
    if (typeToRegister == null || registeredTypes.contains(typeToRegister)) {
      continue; // getGenericSuperclass() returns null for Object and interfaces
    }
    registerTypeVariablesOn(typeToRegister);
    registeredTypes.add(typeToRegister);
    Class<?> rawType = extractRawTypeOf(typeToRegister);
    typesToRegister.add(rawType.getGenericSuperclass());
    typesToRegister.addAll(Arrays.asList(rawType.getGenericInterfaces()));
  }
}
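// Runnable sketch of walking a type hierarchy breadth-first with the same
// reflection calls (the sample classes are invented):
import java.lang.reflect.Type;
import java.util.*;

public class HierarchyWalkDemo {
  interface A {}
  interface B extends A {}
  static class Base implements B {}
  static class Derived extends Base {}

  public static void main(String[] args) {
    Queue<Type> todo = new LinkedList<>();
    Set<Type> seen = new HashSet<>();
    todo.add(Derived.class);
    while (!todo.isEmpty()) {
      Type t = todo.poll();
      if (t == null || !seen.add(t)) {
        continue; // null marks the top of the hierarchy (Object's superclass)
      }
      System.out.println(t.getTypeName());
      if (t instanceof Class) {
        Class<?> c = (Class<?>) t;
        todo.add(c.getGenericSuperclass());
        todo.addAll(Arrays.asList(c.getGenericInterfaces()));
      }
    }
  }
}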
/**
 * Helper method to traverse star-tree using BFS and write nodes into the data buffer.
 */
private static void writeNodes(PinotDataBuffer dataBuffer, long offset, TreeNode rootNode) {
  Queue<TreeNode> queue = new LinkedList<>();
  queue.add(rootNode);
  int currentNodeId = 0;
  while (!queue.isEmpty()) {
    TreeNode node = queue.remove();
    if (node._children == null) {
      offset = writeNode(dataBuffer, offset, node, INVALID_ID, INVALID_ID);
    } else {
      // Sort all children nodes based on dimension value
      List<TreeNode> sortedChildren = new ArrayList<>(node._children.values());
      sortedChildren.sort((o1, o2) -> Integer.compare(o1._dimensionValue, o2._dimensionValue));
      // In BFS numbering, the first child's id follows the current node and
      // every node still waiting in the queue.
      int firstChildId = currentNodeId + queue.size() + 1;
      int lastChildId = firstChildId + sortedChildren.size() - 1;
      offset = writeNode(dataBuffer, offset, node, firstChildId, lastChildId);
      queue.addAll(sortedChildren);
    }
    currentNodeId++;
  }
}
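// Small check of the BFS id arithmetic above, over a toy tree (invented: node 0
// is the root with children 1 and 2). Every node already dequeued plus every
// node still in the queue precedes the children, hence
// firstChildId = currentNodeId + queue.size() + 1.
import java.util.*;

public class BfsIdDemo {
  public static void main(String[] args) {
    Map<Integer, List<Integer>> children = Map.of(0, List.of(1, 2));
    Queue<Integer> queue = new LinkedList<>(List.of(0));
    int currentNodeId = 0;
    while (!queue.isEmpty()) {
      int node = queue.remove();
      List<Integer> kids = children.getOrDefault(node, List.of());
      if (!kids.isEmpty()) {
        int firstChildId = currentNodeId + queue.size() + 1;
        System.out.println("node " + node + ": firstChildId=" + firstChildId); // node 0: firstChildId=1
        queue.addAll(kids);
      }
      currentNodeId++;
    }
  }
}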
/**
 * Given a set of features, return a new set of all features directly or indirectly implied by any
 * of them.
 *
 * @param features the set of features whose implications to find
 * @return the implied set of features
 */
public static Set<Feature<?>> impliedFeatures(Set<Feature<?>> features) {
  Set<Feature<?>> impliedSet = new LinkedHashSet<>();
  Queue<Feature<?>> queue = new ArrayDeque<>(features);
  while (!queue.isEmpty()) {
    Feature<?> feature = queue.remove();
    for (Feature<?> implied : feature.getImpliedFeatures()) {
      if (!features.contains(implied) && impliedSet.add(implied)) {
        queue.add(implied);
      }
    }
  }
  return impliedSet;
}