For IntelliJ IDEA and Android Studio.


// Intentionally empty: placeholder method with no behavior.
private void myMethod() {}
/**
 * Renders this row type signature in the legacy textual form
 * {@code row(name1 type1,name2 type2,...)}.
 *
 * @deprecated legacy textual rendering; kept only for backward compatibility.
 */
@Deprecated
private String rowToString()
{
    // Row types may only carry NAMED_TYPE parameters; anything else is a programming error.
    verify(parameters.stream().allMatch(parameter -> parameter.getKind() == ParameterKind.NAMED_TYPE),
            format("Incorrect parameters for row type %s", parameters));
    StringBuilder fields = new StringBuilder();
    for (TypeSignatureParameter parameter : parameters) {
        if (fields.length() > 0) {
            fields.append(",");
        }
        fields.append(format("%s %s",
                parameter.getNamedTypeSignature().getName(),
                parameter.getNamedTypeSignature().getTypeSignature().toString()));
    }
    return format("row(%s)", fields);
}
/**
 * Builds a {@code FunctionType} from its type parameters: the last parameter is the
 * return type, all preceding parameters are argument types.
 *
 * @param typeManager unused here, required by the factory interface
 * @param parameters the argument types followed by the return type; must be non-empty
 *        and contain only plain TYPE parameters
 * @throws IllegalArgumentException if the parameter list is empty or contains a
 *         non-TYPE parameter
 */
@Override
public Type createType(TypeManager typeManager, List<TypeParameter> parameters)
{
    // At minimum the return type must be present.
    checkArgument(parameters.size() >= 1, "Function type must have at least one parameter, got %s", parameters);
    // Fixed message grammar ("as a parameters" -> "as parameters").
    checkArgument(
            parameters.stream().allMatch(parameter -> parameter.getKind() == ParameterKind.TYPE),
            "Expected only types as parameters, got %s", parameters);
    List<Type> types = parameters.stream().map(TypeParameter::getType).collect(toList());
    int lastIndex = types.size() - 1;
    // Everything before the last entry is an argument type; the last entry is the return type.
    return new FunctionType(types.subList(0, lastIndex), types.get(lastIndex));
}
}
/**
 * Returns true if every aggregation in this node can be decomposed into
 * partial/final steps: each aggregate function implementation must be
 * decomposable and no aggregation may have an ORDER BY clause.
 *
 * @param functionRegistry registry used to resolve aggregate implementations
 */
public boolean isDecomposable(FunctionRegistry functionRegistry)
{
    // values() replaces entrySet().stream().map(Entry::getValue); noneMatch replaces
    // allMatch of a negated predicate. The && preserves the original short-circuit:
    // the ORDER BY scan only runs when all implementations are decomposable.
    return getAggregations().values().stream()
            .map(aggregation -> functionRegistry.getAggregateFunctionImplementation(aggregation.getSignature()))
            .allMatch(InternalAggregationFunction::isDecomposable)
            && getAggregations().values().stream()
            .noneMatch(aggregation -> aggregation.getCall().getOrderBy().isPresent());
}
@GuardedBy("this") private void checkFlushComplete() { // This buffer type assigns each page to a single, arbitrary reader, // so we don't need to wait for no-more-buffers to finish the buffer. // Any readers added after finish will simply receive no data. BufferState state = this.state.get(); if ((state == FLUSHING) || ((state == NO_MORE_PAGES) && masterBuffer.isEmpty())) { if (safeGetBuffersSnapshot().stream().allMatch(ClientBuffer::isDestroyed)) { destroy(); } } }
/**
 * Actuator read operation returning samples and available tags for the named metric,
 * optionally filtered by {@code key:value} tag parameters. Returns {@code null}
 * (HTTP 404) when no matching meter exists.
 */
@ReadOperation
public MetricResponse metric(@Selector String requiredMetricName, @Nullable List<String> tag)
{
    boolean tagsWellFormed = (tag == null) || tag.stream().allMatch((candidate) -> candidate.contains(":"));
    Assert.isTrue(tagsWellFormed, "Each tag parameter must be in the form key:value");
    List<Tag> tags = parseTags(tag);
    List<Meter> meters = new ArrayList<>();
    collectMeters(meters, this.registry, requiredMetricName, tags);
    if (meters.isEmpty()) {
        return null;
    }
    Map<Statistic, Double> samples = getSamples(meters);
    Map<String, Set<String>> availableTags = getAvailableTags(meters);
    // Tags already used for filtering are no longer "available" for further drill-down.
    for (Tag used : tags) {
        availableTags.remove(used.getKey());
    }
    return new MetricResponse(requiredMetricName,
            asList(samples, Sample::new),
            asList(availableTags, AvailableTag::new));
}
/**
 * Wraps a non-empty list of segments that must all belong to one dataSource.
 * Computes the umbrella interval covering every segment's interval
 * (presumably the smallest interval spanning all of them — see JodaUtils).
 *
 * @throws IllegalArgumentException if segments is null/empty or spans multiple dataSources
 */
SegmentProvider(List<DataSegment> segments)
{
    // Bare checkArgument gave no diagnostic on failure; add an explicit message.
    Preconditions.checkArgument(segments != null && !segments.isEmpty(), "segments must not be null or empty");
    final String dataSource = segments.get(0).getDataSource();
    Preconditions.checkArgument(
        segments.stream().allMatch(segment -> segment.getDataSource().equals(dataSource)),
        "segments should have the same dataSource"
    );
    this.segments = segments;
    this.dataSource = dataSource;
    this.interval = JodaUtils.umbrellaInterval(
        segments.stream().map(DataSegment::getInterval).collect(Collectors.toList())
    );
}
/**
 * Records that the given source will receive no more splits; once every
 * partitioned and remote source node has been so marked, transitions the
 * task state machine to finished.
 */
@Override
public synchronized void noMoreSplits(PlanNodeId sourceId)
{
    noMoreSplits.add(sourceId);
    Stream<PlanNode> sourceNodes = Stream.concat(
            fragment.getPartitionedSourceNodes().stream(),
            fragment.getRemoteSourceNodes().stream());
    // Equivalent to allMatch(noMoreSplits::contains): finished only when no source is still pending.
    boolean anyPending = sourceNodes
            .filter(Objects::nonNull)
            .map(PlanNode::getId)
            .anyMatch(id -> !noMoreSplits.contains(id));
    if (!anyPending) {
        taskStateMachine.finished();
    }
}
/**
 * A call is deterministic only if its function (when registered as a scalar)
 * is deterministic AND every argument expression is deterministic.
 */
@Override
public Boolean visitCall(CallExpression call, Void context)
{
    Signature signature = call.getSignature();
    boolean knownNonDeterministic = registry.isRegistered(signature)
            && !registry.getScalarFunctionImplementation(signature).isDeterministic();
    if (knownNonDeterministic) {
        return false;
    }
    // Recurse into each argument subtree via this visitor.
    return call.getArguments().stream().allMatch(argument -> argument.accept(this, context));
}
/**
 * Merges per-stripe column statistics into file-level statistics:
 * for each column index, merges that column's statistics across all stripes.
 *
 * @param stripes one statistics list per stripe; all lists must be the same size
 * @return one merged ColumnStatistics per column, or an empty list for no stripes
 * @throws IllegalArgumentException if stripes disagree on column count
 */
private static List<ColumnStatistics> toFileStats(List<List<ColumnStatistics>> stripes)
{
    if (stripes.isEmpty()) {
        return ImmutableList.of();
    }
    int columnCount = stripes.get(0).size();
    // Previously a bare checkArgument with no diagnostic; include the expected count.
    checkArgument(
            stripes.stream().allMatch(stripe -> columnCount == stripe.size()),
            "Expected %s columns in every stripe", columnCount);
    ImmutableList.Builder<ColumnStatistics> fileStats = ImmutableList.builder();
    for (int i = 0; i < columnCount; i++) {
        // Effectively-final copy of the loop index for capture by the lambda below.
        int column = i;
        fileStats.add(ColumnStatistics.mergeColumnStatistics(stripes.stream()
                .map(stripe -> stripe.get(column))
                .collect(toList())));
    }
    return fileStats.build();
}
// Pushes the deterministic part of the inherited predicate below this projection,
// leaving any conjunct that touches a non-deterministic assignment in a FilterNode above it.
@Override
public PlanNode visitProject(ProjectNode node, RewriteContext<Expression> context)
{
    // Symbols whose projection expression is deterministic; only conjuncts referencing
    // these exclusively are safe to push below the project.
    Set<Symbol> deterministicSymbols = node.getAssignments().entrySet().stream()
            .filter(entry -> DeterminismEvaluator.isDeterministic(entry.getValue()))
            .map(Map.Entry::getKey)
            .collect(Collectors.toSet());
    Predicate<Expression> deterministic = conjunct -> SymbolsExtractor.extractUnique(conjunct).stream()
            .allMatch(deterministicSymbols::contains);
    // Partition: true -> conjuncts safe to push down; false -> conjuncts that must stay above.
    Map<Boolean, List<Expression>> conjuncts = extractConjuncts(context.get()).stream().collect(Collectors.partitioningBy(deterministic));
    // Push down conjuncts from the inherited predicate that don't depend on non-deterministic assignments
    PlanNode rewrittenNode = context.defaultRewrite(node,
            inlineSymbols(node.getAssignments().getMap(), combineConjuncts(conjuncts.get(true))));
    // All non-deterministic conjuncts, if any, will be in the filter node.
    if (!conjuncts.get(false).isEmpty()) {
        rewrittenNode = new FilterNode(idAllocator.getNextId(), rewrittenNode, combineConjuncts(conjuncts.get(false)));
    }
    return rewrittenNode;
}
// Bulk cache loader: fetches column statistics for all requested columns of a single
// Hive table in one delegate call, then fans the result back out per key.
private Map<TableColumnStatisticsCacheKey, Optional<HiveColumnStatistics>> loadColumnStatistics(Iterable<? extends TableColumnStatisticsCacheKey> keys)
{
    if (Iterables.isEmpty(keys)) {
        return ImmutableMap.of();
    }
    // All keys must target the same table: take it from the first key (get() is safe —
    // emptiness was checked above) and verify the rest agree.
    HiveTableName hiveTableName = stream(keys).findFirst().get().getHiveTableName();
    checkArgument(stream(keys).allMatch(key -> key.getHiveTableName().equals(hiveTableName)), "all keys must relate to same hive table");
    Set<String> columnNames = stream(keys).map(TableColumnStatisticsCacheKey::getColumnName).collect(Collectors.toSet());
    // Single round-trip for every requested column of the table.
    Optional<Map<String, HiveColumnStatistics>> columnStatistics = delegate.getTableColumnStatistics(hiveTableName.getDatabaseName(), hiveTableName.getTableName(), columnNames);
    // Every input key gets an entry; columns absent from the delegate's answer map to
    // Optional.empty() — presumably so the cache records the absence instead of
    // re-fetching on each lookup (TODO confirm against the cache configuration).
    ImmutableMap.Builder<TableColumnStatisticsCacheKey, Optional<HiveColumnStatistics>> resultMap = ImmutableMap.builder();
    for (TableColumnStatisticsCacheKey key : keys) {
        if (!columnStatistics.isPresent() || !columnStatistics.get().containsKey(key.getColumnName())) {
            resultMap.put(key, Optional.empty());
        }
        else {
            resultMap.put(key, Optional.of(columnStatistics.get().get(key.getColumnName())));
        }
    }
    return resultMap.build();
}
/**
 * Fails when the ORC footer carries no real column names, i.e. when every physical
 * column name matches DEFAULT_HIVE_COLUMN_NAME_PATTERN (presumably the writer's
 * auto-generated placeholder names — the pattern is defined elsewhere in this file).
 *
 * @throws PrestoException with HIVE_FILE_MISSING_COLUMN_NAMES if names are missing
 */
private static void verifyFileHasColumnNames(List<String> physicalColumnNames, Path path)
{
    if (physicalColumnNames.isEmpty()) {
        return;
    }
    boolean allDefaultNames = physicalColumnNames.stream()
            .allMatch(physicalColumnName -> DEFAULT_HIVE_COLUMN_NAME_PATTERN.matcher(physicalColumnName).matches());
    if (allDefaultNames) {
        throw new PrestoException(
                HIVE_FILE_MISSING_COLUMN_NAMES,
                "ORC file does not contain column names in the footer: " + path);
    }
}
public boolean isPartitionedOn(Collection<Symbol> columns, Set<Symbol> knownConstants) { // partitioned on (k_1, k_2, ..., k_n) => partitioned on (k_1, k_2, ..., k_n, k_n+1, ...) // can safely ignore all constant columns when comparing partition properties return arguments.stream() .filter(ArgumentBinding::isVariable) .map(ArgumentBinding::getColumn) .filter(symbol -> !knownConstants.contains(symbol)) .allMatch(columns::contains); }
/**
 * Cycles the visibility of the navigation/tooling/information part stacks:
 * restore all if any is minimized, show all if all are hidden, otherwise
 * minimize all. The editor is activated in every case.
 */
@Override
public void actionPerformed(ActionEvent e)
{
    Set<PartStack> partStacks = new HashSet<>(3);
    partStacks.add(workspaceAgent.getPartStack(NAVIGATION));
    partStacks.add(workspaceAgent.getPartStack(TOOLING));
    partStacks.add(workspaceAgent.getPartStack(INFORMATION));
    Set<State> states = partStacks.stream().map(PartStack::getPartStackState).collect(toSet());
    // Any minimized stack -> restore everything.
    if (states.stream().anyMatch(state -> MINIMIZED == state)) {
        partStacks.forEach(PartStack::restore);
        activateEditor();
        return;
    }
    PartPresenter activePart = workspaceAgent.getActivePart();
    // instanceof is false for null, so the former explicit null check was redundant.
    if (!(activePart instanceof EditorPartPresenter)) {
        activateEditor();
        return;
    }
    // All hidden -> show everything.
    if (states.stream().allMatch(state -> HIDDEN == state)) {
        partStacks.forEach(PartStack::show);
        activateEditor();
        return;
    }
    partStacks.forEach(PartStack::minimize);
    activateEditor();
}