Refine search
/**
 * Returns all attributes of the request's current session as a name-to-value map.
 *
 * @param request the mock request whose session is inspected; an existing session
 *                is used but never created ({@code getSession(false)})
 * @return a map of attribute name to value, or an empty map when there is no
 *         session or no attribute names
 */
protected final Map<String, Object> getSessionAttributes(MockHttpServletRequest request) {
    HttpSession session = request.getSession(false);
    if (session != null) {
        Enumeration<String> attrNames = session.getAttributeNames();
        if (attrNames != null) {
            // Collectors.toMap rejects null values with a NullPointerException; an
            // attribute can be removed between enumerating the names and reading the
            // value (getAttribute then returns null), so accumulate into a map
            // directly instead of using toMap.
            Map<String, Object> attributes = new LinkedHashMap<>();
            for (String name : Collections.list(attrNames)) {
                attributes.put(name, session.getAttribute(name));
            }
            return attributes;
        }
    }
    return Collections.emptyMap();
}
/**
 * Builds a cache over the given buckets, which must already be sorted by interval.
 * All argument validation happens before any state is assigned.
 *
 * @param sortedBuckets non-null list of buckets ordered by interval
 */
SegmentsCostCache(ArrayList<Bucket> sortedBuckets) {
    Preconditions.checkNotNull(sortedBuckets, "buckets should not be null");
    Preconditions.checkArgument(
        BUCKET_ORDERING.isOrdered(sortedBuckets),
        "buckets must be ordered by interval"
    );
    this.sortedBuckets = sortedBuckets;
    this.intervals = sortedBuckets.stream()
        .map(Bucket::getInterval)
        .collect(Collectors.toCollection(ArrayList::new));
}
/**
 * Renders a one-line usage string: named arguments first, then positional
 * arguments in ascending position order, separated by single spaces.
 *
 * @return the usage string with no leading or trailing whitespace
 */
public String usage() {
    StringBuilder sb = new StringBuilder();
    if (!namedArgs.isEmpty()) {
        sb.append(namedArgs.values().stream()
            .map(NamedArgument::usage)
            .collect(Collectors.joining(" ")));
    }
    if (!positionalArgs.isEmpty()) {
        sb.append(" ");
        // Sort a stream view rather than the field itself — rendering a usage
        // string must not reorder the argument list as a side effect.
        sb.append(positionalArgs.stream()
            .sorted(Comparator.comparingInt(PositionalArgument::position))
            .map(PositionalArgument::usage)
            .collect(Collectors.joining(" ")));
    }
    return sb.toString().trim();
}
/**
 * Exposes the request attributes as a name-to-value map.
 *
 * @return a map keyed by attribute name, or an empty map when the request has
 *         no attributes
 */
@Override
public Map<String, Object> attributes() {
    final Enumeration<String> names = req.getAttributeNames();
    return names.hasMoreElements()
        ? Collections.list(names).stream()
            .collect(Collectors.toMap(name -> name, req::getAttribute))
        : Collections.emptyMap();
}
/**
 * Get the regions to be reopened when modifying a table.
 * <p/>
 * Notice that the {@code openSeqNum} in the returned HRegionLocation is also used to indicate the
 * state of this region, positive means the region is in {@link State#OPEN}, -1 means
 * {@link State#OPENING}. And for regions in other states we do not need reopen them.
 */
public List<HRegionLocation> getRegionsOfTableForReopen(TableName tableName) {
    return getTableRegionStateNodes(tableName)
        .stream()
        .map(this::createRegionForReopen)
        // createRegionForReopen returns null for regions that need no reopen
        .filter(location -> location != null)
        .collect(Collectors.toList());
}
/**
 * Benchmarks a serial, lazy JDK-stream anagram grouping: words are grouped by
 * alphagram, groups below the size threshold are dropped, and the remainder is
 * reported largest-first.
 */
@Benchmark
public void serial_lazy_jdk() {
    Map<Alphagram, List<String>> groupBy =
        this.jdkWords.stream().collect(Collectors.groupingBy(Alphagram::new));
    groupBy.values()
        .stream()
        .filter(anagrams -> anagrams.size() >= SIZE_THRESHOLD)
        .sorted(Comparator.<List<String>>comparingInt(List::size).reversed())
        .map(anagrams -> anagrams.size() + ": " + anagrams)
        .forEach(line -> Assert.assertFalse(line.isEmpty()));
}
/**
 * @return Return the regions of the table; does not include OFFLINE unless you set
 *         <code>offline</code> to true. Does not include regions that are in the
 *         {@link State#SPLIT} state.
 */
private List<RegionInfo> getRegionsOfTable(TableName table, Predicate<RegionStateNode> filter) {
    return getTableRegionStateNodes(table)
        .stream()
        .filter(filter)
        .map(RegionStateNode::getRegionInfo)
        .collect(Collectors.toList());
}
/**
 * Returns a single stream over the contents of all buffers, concatenated in order.
 *
 * @return an input stream reading each buffer's written bytes in sequence
 * @throws IOException if this object has been closed or a stream cannot be opened
 */
@Override
public InputStream asInputStream() throws IOException {
    checkOpen();
    return ByteSource.concat(
        buffers.stream()
            .map(buffer -> (ByteSource) new ByteSource() {
                @Override
                public InputStream openStream() {
                    // Duplicate inside openStream() so every opened stream gets an
                    // independent position/limit over the same underlying bytes.
                    ByteBuffer readable = buffer.duplicate();
                    readable.flip();
                    return new ByteBufferInputStream(readable);
                }
            })
            .collect(Collectors.toList()))
        .openStream();
}
/** Add NER tags to a tree. **/ private static void addNERTags(Tree tree) { // set up tagger if necessary if (NER_TAGGER == null || NER_CLASSIFY_METHOD == null) { setupNERTagger(); } if (NER_TAGGER != null && NER_CLASSIFY_METHOD != null) { // we have everything successfully setup and so can act. try { // classify List<CoreLabel> labels = tree.yield().stream().map(w -> (CoreLabel) w).collect(Collectors.toList()); NER_CLASSIFY_METHOD.invoke(NER_TAGGER, labels); } catch (Exception ex) { log.warn("Error running " + NER_COMBINER_NAME + " on Tree! Not applying NER tags!"); } } }
/**
 * Greedily groups entity mentions into clusters of mentions that refer to the
 * same entity (ignoring entity linking). Each mention joins the first existing
 * cluster containing a matching member, otherwise it starts a new cluster.
 *
 * @param entityMentions the mentions to cluster
 * @return the clusters, each as a list of the mentions' underlying CoreMaps
 */
public List<List<CoreMap>> clusterEntityMentions(List<CoreMap> entityMentions) {
    List<CoreEntityMention> wrappedEntityMentions = wrapEntityMentions(entityMentions);
    ArrayList<ArrayList<CoreEntityMention>> entityMentionClusters = new ArrayList<>();
    for (CoreEntityMention mention : wrappedEntityMentions) {
        ArrayList<CoreEntityMention> matchedCluster = null;
        for (ArrayList<CoreEntityMention> cluster : entityMentionClusters) {
            if (cluster.stream().anyMatch(member -> sameEntityWithoutLinking(mention, member))) {
                matchedCluster = cluster;
                break;
            }
        }
        if (matchedCluster != null) {
            matchedCluster.add(mention);
        } else {
            // no cluster matched — this mention seeds a new cluster
            ArrayList<CoreEntityMention> freshCluster = new ArrayList<>();
            freshCluster.add(mention);
            entityMentionClusters.add(freshCluster);
        }
    }
    List<List<CoreMap>> coreMapEntityMentionClusters = new ArrayList<>();
    for (ArrayList<CoreEntityMention> cluster : entityMentionClusters) {
        coreMapEntityMentionClusters.add(
            cluster.stream().map(CoreEntityMention::coreMap).collect(Collectors.toList()));
    }
    return coreMapEntityMentionClusters;
}
/**
 * Validates the parsed command line against the declared arguments.
 *
 * @throws IncorrectUsage if an unknown option was given, if fewer orphan
 *         arguments than mandatory positional arguments were supplied, or if
 *         more orphan arguments than positional slots were supplied
 */
private void validate() throws IncorrectUsage {
    for (String option : parsedArgs.asMap().keySet()) {
        if (!namedArgs.containsKey(option)) {
            throw new IncorrectUsage(format("unrecognized option: '%s'", option));
        }
    }
    long mandatoryCount = positionalArgs.stream()
        .filter(MandatoryPositionalArgument.class::isInstance)
        .count();
    if (parsedArgs.orphans().size() < mandatoryCount) {
        throw new IncorrectUsage("not enough arguments");
    }
    // anything beyond the declared positional slots is excess
    String excess = parsedArgs.orphans().stream()
        .skip(positionalArgs.size())
        .collect(Collectors.joining(" "));
    if (!excess.isEmpty()) {
        throw new IncorrectUsage(format("unrecognized arguments: '%s'", excess));
    }
}
/**
 * Converts a Hive struct type into a Calcite row type by translating each
 * struct field's type and pairing it with the field's name.
 *
 * @param structTypeInfo the Hive struct type to convert
 * @return the equivalent Calcite struct (row) type
 */
private RelDataType getRelDataType(StructTypeInfo structTypeInfo) {
    return typeFactory.createStructType(
        structTypeInfo.getAllStructFieldTypeInfos().stream()
            .map(this::convertToRelDataType)
            .collect(Collectors.toList()),
        structTypeInfo.getAllStructFieldNames());
}
/**
 * Benchmarks aggregating positions' market values per product with a serial,
 * lazy JDK stream ({@code groupingBy} + {@code summarizingDouble}).
 *
 * @return summary statistics of market value keyed by product
 */
@Benchmark
public Map<Product, DoubleSummaryStatistics> aggregateByProduct_serial_lazy_jdk() {
    Map<Product, DoubleSummaryStatistics> statsByProduct = this.jdkPositions
        .stream()
        .collect(Collectors.groupingBy(
            Position::getProduct,
            Collectors.summarizingDouble(Position::getMarketValue)));
    Assert.assertNotNull(statsByProduct);
    return statsByProduct;
}