/** Sorts the backing tuple list in place using the configured comparator. */
public void sort() {
    this.tuples.sort(this.comparator);
}
// Order event sources by their generation time before processing
// (presumably earliest-generated first — TODO confirm EventGenerationTimeComparator's direction).
nextEventSource.sort(new EventGenerationTimeComparator());
/**
 * Performs one-time post-initialisation: sorts the registered packet classes into a
 * deterministic order (case-insensitive by canonical name, case-sensitive tie-break).
 * Subsequent calls are no-ops.
 */
public static void postInit() {
    if (INSTANCE.isPostInitialised) {
        return;
    }
    INSTANCE.isPostInitialised = true;
    INSTANCE.packets.sort((first, second) -> {
        String nameA = first.getCanonicalName();
        String nameB = second.getCanonicalName();
        int order = String.CASE_INSENSITIVE_ORDER.compare(nameA, nameB);
        // Case-insensitive ties (e.g. "Foo" vs "FOO") fall back to a case-sensitive compare
        // so the final ordering is total and stable across runs.
        return order != 0 ? order : nameA.compareTo(nameB);
    });
}
/** Delegates sorting to the superclass, then calls runChanges() so listeners observe the reordered contents. */
@Override public void sort(Comparator<? super T> c) { super.sort(c); runChanges(); }
/** * Perform iteration on every node interceptor, passing to ones the list of children to filter * them before inserting into parent node. * * @param parent parent node * @return instance of {@link org.eclipse.che.api.promises.client.Function} with promise that * contains list of intercepted children */ @NotNull private Operation<List<Node>> interceptChildren(@NotNull final Node parent) { return children -> { // In case of nodeInterceptors is empty we still need to call iterate(...) // in order to call set parent on children and call onLoadSuccess(...) LinkedList<NodeInterceptor> sortedByPriorityQueue = new LinkedList<>(nodeInterceptors); sortedByPriorityQueue.sort(priorityComparator); iterate(sortedByPriorityQueue, parent, children); }; }
/**
 * Creates a packet for the given change set and appends it to the queue,
 * keeping the queue sorted.
 *
 * @param requestId the opaque identifier indicating the request that caused the changes
 *                  if the owning session initiated the changes.
 * @param etag      the opaque identifier identifying the version. May be null if the
 *                  packet is not cache-able.
 * @param changeSet the changeSet to create the packet from.
 * @return the newly created packet.
 */
public synchronized Packet addPacket( @Nullable final Integer requestId,
                                      @Nullable final String etag,
                                      @Nonnull final ChangeSet changeSet )
{
    final Packet created = new Packet( _nextSequence++, requestId, etag, changeSet );
    _packets.add( created );
    // A null comparator sorts by the packets' natural ordering.
    _packets.sort( null );
    return created;
}
/**
 * Collects the server ids of every known proxy and server, sorted in reverse
 * (descending) natural order.
 *
 * @return descending-sorted list of proxy and server ids
 */
private LinkedList<String> getProxiesAndServers() {
    LinkedList<String> serverIds = new LinkedList<>();
    for (ProxyInfo proxy : CloudAPI.getInstance().getProxys()) {
        serverIds.add(proxy.getServiceId().getServerId());
    }
    for (ServerInfo server : CloudAPI.getInstance().getServers()) {
        serverIds.add(server.getServiceId().getServerId());
    }
    serverIds.sort(Collections.reverseOrder());
    return serverIds;
} }
/**
 * Collects the names of all proxy groups and server groups, sorted in reverse
 * (descending) natural order.
 *
 * @return descending-sorted list of group names
 */
private LinkedList<String> getProxyAndServerGroups() {
    LinkedList<String> groupNames = new LinkedList<>(CloudAPI.getInstance().getProxyGroupMap().keySet());
    groupNames.addAll(CloudAPI.getInstance().getServerGroupMap().keySet());
    Collections.sort(groupNames, Collections.reverseOrder());
    return groupNames;
}
@SuppressWarnings({ "rawtypes", "unchecked" }) @Override public <T> Optional<PropertyValuePresenter<T>> getPresenter(Property<T> property) { ObjectUtils.argumentNotNull(property, "Property must be not null"); LOGGER.debug(() -> "Get PropertyValuePresenter for property [" + property + "]"); final LinkedList<PropertyValuePresenter> candidates = new LinkedList<>(); for (Entry<Predicate, PropertyValuePresenter> entry : presenters.entrySet()) { if (entry.getKey().test(property)) { candidates.add(entry.getValue()); } } if (!candidates.isEmpty()) { if (candidates.size() > 1) { // sort by priority candidates.sort(PRIORITY_COMPARATOR); LOGGER.debug(() -> "Get PropertyValuePresenter for property [" + property + "] - return first of candidates: [" + candidates + "]"); } return Optional.of(candidates.getFirst()); } LOGGER.debug(() -> "No PropertyValuePresenter available for property [" + property + "]"); return Optional.empty(); }
/**
 * Creates a builder for the given time window. The fixed events are copied and
 * sorted chronologically by their start time; the candidate multimaps are keyed
 * by natural order with a deterministic value ordering.
 */
public ItineraryBuilder(LocalDateTime startTime, LocalDateTime endTime,
        RoundingTravelTimeCalculator travelTimeCalculator, List<Event> fixedEvents) {
    this.startTime = startTime;
    this.endTime = endTime;
    this.travelTimeCalculator = travelTimeCalculator;
    // Defensive copy so sorting does not mutate the caller's list.
    this.fixedEvents = newLinkedList(fixedEvents);
    this.fixedEvents.sort(comparing(event -> event.getEventTime().getStart()));
    activities = TreeMultimap.create(natural(), ARBITRARY_BUT_PREDICTABLE_ORDERING);
    foods = TreeMultimap.create(natural(), ARBITRARY_BUT_PREDICTABLE_ORDERING);
    hotels = TreeMultimap.create(natural(), ARBITRARY_BUT_PREDICTABLE_ORDERING);
}
@SuppressWarnings({ "rawtypes", "unchecked" }) @Override public <R, T> Optional<PropertyRenderer<R, T>> getRenderer(Class<R> renderingType, Property<? extends T> property) { ObjectUtils.argumentNotNull(property, "Property must be not null"); ObjectUtils.argumentNotNull(renderingType, "Rendering type must be not null"); LOGGER.debug(() -> "Get PropertyRenderer for property [" + property + "] and type [" + renderingType + "]"); final Map<Predicate, PropertyRenderer> renderersForType = renderers.getOrDefault(renderingType, Collections.emptyMap()); final LinkedList<PropertyRenderer> candidates = new LinkedList<>(); for (Entry<Predicate, PropertyRenderer> entry : renderersForType.entrySet()) { if (entry.getKey().test(property)) { candidates.add(entry.getValue()); } } if (!candidates.isEmpty()) { if (candidates.size() > 1) { // sort by priority candidates.sort(PRIORITY_COMPARATOR); LOGGER.debug(() -> "Get PropertyRenderer for property [" + property + "] and type [" + renderingType + "] - return first of candidates: [" + candidates + "]"); } return Optional.of(candidates.getFirst()); } LOGGER.debug( () -> "No PropertyRenderer available for property [" + property + "] and type [" + renderingType + "]"); return Optional.empty(); }
/**
 * Returns all channels ordered by their stored position; channels sharing a
 * position are tie-broken by creation date, newest first.
 *
 * @return a new sorted list of the guild's channels
 */
@Override
public List<IChannel> getChannels() {
    LinkedList<IChannel> list = new LinkedList<>(channels.values());
    list.sort((c1, c2) -> {
        int originalPos1 = ((Channel) c1).position;
        int originalPos2 = ((Channel) c2).position;
        if (originalPos1 == originalPos2) {
            // Same position: newer channel sorts first (descending creation date).
            return c2.getCreationDate().compareTo(c1.getCreationDate());
        }
        // Integer.compare avoids the overflow that plain subtraction can produce
        // for extreme position values.
        return Integer.compare(originalPos1, originalPos2);
    });
    return list;
}
/**
 * Returns all voice channels ordered by their stored position; channels sharing
 * a position are tie-broken by creation date, newest first.
 *
 * @return a new sorted list of the guild's voice channels
 */
@Override
public List<IVoiceChannel> getVoiceChannels() {
    LinkedList<IVoiceChannel> list = new LinkedList<>(voiceChannels.values());
    list.sort((c1, c2) -> {
        int originalPos1 = ((Channel) c1).position;
        int originalPos2 = ((Channel) c2).position;
        if (originalPos1 == originalPos2) {
            // Same position: newer channel sorts first (descending creation date).
            return c2.getCreationDate().compareTo(c1.getCreationDate());
        }
        // Integer.compare avoids the overflow that plain subtraction can produce
        // for extreme position values.
        return Integer.compare(originalPos1, originalPos2);
    });
    return list;
}
/**
 * Returns all roles ordered by their stored position; roles sharing a position
 * are tie-broken by creation date, newest first.
 *
 * @return a new sorted list of the guild's roles
 */
@Override
public List<IRole> getRoles() {
    LinkedList<IRole> list = new LinkedList<>(roles.values());
    list.sort((r1, r2) -> {
        int originalPos1 = ((Role) r1).position;
        int originalPos2 = ((Role) r2).position;
        if (originalPos1 == originalPos2) {
            // Same position: newer role sorts first (descending creation date).
            return r2.getCreationDate().compareTo(r1.getCreationDate());
        }
        // Integer.compare avoids the overflow that plain subtraction can produce
        // for extreme position values.
        return Integer.compare(originalPos1, originalPos2);
    });
    return list;
}
/**
 * Returns all categories ordered by their stored position; categories sharing a
 * position are tie-broken by creation date, newest first.
 *
 * @return a new sorted list of the guild's categories
 */
@Override
public List<ICategory> getCategories() {
    LinkedList<ICategory> list = new LinkedList<>(categories.values());
    list.sort((c1, c2) -> {
        int originalPos1 = ((Category) c1).position;
        int originalPos2 = ((Category) c2).position;
        if (originalPos1 == originalPos2) {
            // Same position: newer category sorts first (descending creation date).
            return c2.getCreationDate().compareTo(c1.getCreationDate());
        }
        // Integer.compare avoids the overflow that plain subtraction can produce
        // for extreme position values.
        return Integer.compare(originalPos1, originalPos2);
    });
    return list;
}
/**
 * Searches all logs that belong to the tree and orders them nearest-first
 * relative to the tree's base location.
 *
 * @param world The world where the blocks are in.
 */
public void findLogs(@NotNull final World world) {
    addAndSearch(world, location);
    // Double.compare replaces the original "(int) (d1 - d2)" comparator: casting
    // the difference to int truncated fractional differences to 0 (breaking the
    // ordering for blocks less than 1.0 apart) and could overflow for large
    // distances.
    woodBlocks.sort((b1, b2) -> Double.compare(b1.distanceSq(location), b2.distanceSq(location)));
    if (getStumpLocations().isEmpty()) {
        fillTreeStumps(location.getY());
    }
}
/**
 * Flattens the workflow steps of a sub-graph into a single linear sequence:
 * the steps are sorted by computed weight, all intra-sub-graph links are
 * removed, and the steps are re-chained one after another in weight order.
 *
 * @param topologyContext context providing the topology used by the weight comparator
 * @param subGraph        the sub-graph whose steps are flattened in place
 */
private void flattenWorkflow(TopologyContext topologyContext, SubGraph subGraph) {
    ComputeNodesWeightsGraphConsumer consumer = new ComputeNodesWeightsGraphConsumer();
    subGraph.browse(consumer);
    if (consumer.getAllNodes().isEmpty()) {
        // This is really strange as we have a node template without any workflow step
        return;
    }
    Map<String, WorkflowStep> allNodes = consumer.getAllNodes();
    // Order the steps by the weights the consumer computed during the browse.
    LinkedList<WorkflowStep> sortedByWeightsSteps = new LinkedList<>(allNodes.values());
    sortedByWeightsSteps.sort(new WorkflowStepWeightComparator(consumer.getAllNodeWeights(), topologyContext.getTopology()));
    Set<String> allSubGraphNodeIds = allNodes.keySet();
    sortedByWeightsSteps.forEach(workflowStep -> {
        // Remove all old links between the steps in the graph
        workflowStep.removeAllPrecedings(allSubGraphNodeIds);
        workflowStep.removeAllFollowings(allSubGraphNodeIds);
    });
    // Create a sequence with sorted sub graph steps: each step precedes the next
    // heavier one, forming a single linear chain.
    for (int i = 0; i < sortedByWeightsSteps.size() - 1; i++) {
        sortedByWeightsSteps.get(i).addFollowing(sortedByWeightsSteps.get(i + 1).getName());
        sortedByWeightsSteps.get(i + 1).addPreceding(sortedByWeightsSteps.get(i).getName());
    }
} }
/**
 * This creates a list of validators, with a number of validators above and below the local
 * address. The returned list is sorted.
 *
 * @param localAddr The address of the node which signed the parent block
 * @param countLower The number of validators which have a lower address than localAddr
 * @param countHigher The number of validators which have a higher address than localAddr
 * @return A sorted list of validators which matches parameters (including the localAddr).
 */
private LinkedList<Address> createValidatorList( final Address localAddr, final int countLower, final int countHigher) {
    final LinkedList<Address> result = Lists.newLinkedList();
    // Note: Order of this list is irrelevant, is sorted by value later.
    result.add(localAddr);
    // Negative offsets (i - countLower) produce addresses below localAddr.
    for (int i = 0; i < countLower; i++) {
        result.add(AddressHelpers.calculateAddressWithRespectTo(localAddr, i - countLower));
    }
    // Positive offsets (i + 1) produce addresses above localAddr.
    for (int i = 0; i < countHigher; i++) {
        result.add(AddressHelpers.calculateAddressWithRespectTo(localAddr, i + 1));
    }
    // null comparator: sort by the addresses' natural ordering.
    result.sort(null);
    return result;
}
public void writeAsVcf(final File output, final File refFile) throws FileNotFoundException { ReferenceSequenceFile ref = new IndexedFastaSequenceFile(refFile); try (VariantContextWriter writer = new VariantContextWriterBuilder() .setOutputFile(output) .setReferenceDictionary(ref.getSequenceDictionary()) .build()) { final VCFHeader vcfHeader = new VCFHeader( VCFUtils.withUpdatedContigsAsLines(Collections.emptySet(), refFile, header.getSequenceDictionary(), false), Collections.singleton(HET_GENOTYPE_FOR_PHASING)); VCFUtils.withUpdatedContigsAsLines(Collections.emptySet(), refFile, header.getSequenceDictionary(), false); vcfHeader.addMetaDataLine(new VCFHeaderLine(VCFHeaderVersion.VCF4_2.getFormatString(), VCFHeaderVersion.VCF4_2.getVersionString())); vcfHeader.addMetaDataLine(new VCFInfoHeaderLine(VCFConstants.ALLELE_FREQUENCY_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Allele Frequency, for each ALT allele, in the same order as listed")); vcfHeader.addMetaDataLine(new VCFFormatHeaderLine(VCFConstants.GENOTYPE_KEY, 1, VCFHeaderLineType.String, "Genotype")); vcfHeader.addMetaDataLine(new VCFFormatHeaderLine(VCFConstants.PHASE_SET_KEY, 1, VCFHeaderLineType.String, "Phase-set identifier for phased genotypes.")); vcfHeader.addMetaDataLine(new VCFHeaderLine(VCFHeader.SOURCE_KEY,"HaplotypeMap::writeAsVcf")); vcfHeader.addMetaDataLine(new VCFHeaderLine("reference","HaplotypeMap::writeAsVcf")); // vcfHeader.addMetaDataLine(new VCFHeaderLine()); writer.writeHeader(vcfHeader); final LinkedList<VariantContext> variants = new LinkedList<>(this.asVcf(ref)); variants.sort(vcfHeader.getVCFRecordComparator()); variants.forEach(writer::add); } }
public void writeAsVcf(final File output, final File refFile) throws FileNotFoundException { ReferenceSequenceFile ref = new IndexedFastaSequenceFile(refFile); try (VariantContextWriter writer = new VariantContextWriterBuilder() .setOutputFile(output) .setReferenceDictionary(ref.getSequenceDictionary()) .build()) { final VCFHeader vcfHeader = new VCFHeader( VCFUtils.withUpdatedContigsAsLines(Collections.emptySet(), refFile, header.getSequenceDictionary(), false), Collections.singleton(HET_GENOTYPE_FOR_PHASING)); VCFUtils.withUpdatedContigsAsLines(Collections.emptySet(), refFile, header.getSequenceDictionary(), false); vcfHeader.addMetaDataLine(new VCFHeaderLine(VCFHeaderVersion.VCF4_2.getFormatString(), VCFHeaderVersion.VCF4_2.getVersionString())); vcfHeader.addMetaDataLine(new VCFInfoHeaderLine(VCFConstants.ALLELE_FREQUENCY_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Allele Frequency, for each ALT allele, in the same order as listed")); vcfHeader.addMetaDataLine(new VCFFormatHeaderLine(VCFConstants.GENOTYPE_KEY, 1, VCFHeaderLineType.String, "Genotype")); vcfHeader.addMetaDataLine(new VCFFormatHeaderLine(VCFConstants.PHASE_SET_KEY, 1, VCFHeaderLineType.String, "Phase-set identifier for phased genotypes.")); vcfHeader.addMetaDataLine(new VCFHeaderLine(VCFHeader.SOURCE_KEY,"HaplotypeMap::writeAsVcf")); vcfHeader.addMetaDataLine(new VCFHeaderLine("reference","HaplotypeMap::writeAsVcf")); // vcfHeader.addMetaDataLine(new VCFHeaderLine()); writer.writeHeader(vcfHeader); final LinkedList<VariantContext> variants = new LinkedList<>(this.asVcf(ref)); variants.sort(vcfHeader.getVCFRecordComparator()); variants.forEach(writer::add); } }