/**
 * Renders this function type as {@code function<...>}, with the display
 * names of its type parameters comma-joined inside the angle brackets.
 */
@Override
public String getDisplayName()
{
    StringBuilder display = new StringBuilder("function<");
    boolean first = true;
    for (Type parameter : getTypeParameters()) {
        if (!first) {
            display.append(",");
        }
        display.append(parameter.getDisplayName());
        first = false;
    }
    return display.append(">").toString();
}
// Renders a human-readable description of the factory invocation that
// produced the given item, e.g. "factoryName(arg1, arg2)".
@Override String reportItem(Item<?> item) {
    // Look up the argument list recorded for this item's (group, index) slot.
    List<Object> factoryArgs = argGroups.get(item.groupNumber).get(item.itemNumber);
    // useForNull keeps null arguments printable instead of throwing NPE.
    return factory.getName() + "(" + Joiner.on(", ").useForNull("null").join(factoryArgs) + ")";
} });
/**
 * Loads the metric definitions for the keys named in the request, failing
 * fast when any key is unknown or refers to a metric type this web service
 * cannot serve.
 *
 * @throws NotFoundException if any requested metric key has no matching row
 * @throws IllegalArgumentException if any requested metric has a forbidden value type
 */
private List<MetricDto> searchMetrics(DbSession dbSession, ComponentTreeRequest request) {
    List<String> requestedKeys = requireNonNull(request.getMetricKeys());
    List<MetricDto> metrics = dbClient.metricDao().selectByKeys(dbSession, requestedKeys);

    // Report every unknown key in a single error rather than the first one hit.
    if (metrics.size() < requestedKeys.size()) {
        Set<String> resolvedKeys = new LinkedHashSet<>(Lists.transform(metrics, MetricDto::getKey));
        Set<String> unknownKeys = Sets.difference(new LinkedHashSet<>(requestedKeys), resolvedKeys);
        throw new NotFoundException(format("The following metric keys are not found: %s", COMMA_JOINER.join(unknownKeys)));
    }

    // Metrics of a forbidden value type must be fetched through api/measures/component.
    String forbiddenKeys = metrics.stream()
        .filter(metric -> ComponentTreeAction.FORBIDDEN_METRIC_TYPES.contains(metric.getValueType()))
        .map(MetricDto::getKey)
        .sorted()
        .collect(MoreCollectors.join(COMMA_JOINER));
    checkArgument(forbiddenKeys.isEmpty(),
        "Metrics %s can't be requested in this web service. Please use api/measures/component", forbiddenKeys);

    return metrics;
}
.filter(column -> !column.isHidden()) .map(ColumnMetadata::getName) .collect(toImmutableList()); .map(Identifier::getValue) .map(column -> column.toLowerCase(ENGLISH)) .collect(toImmutableList()); analysis.setInsert(new Analysis.Insert( targetTableHandle.get(), insertColumns.stream().map(columnHandles::get).collect(toImmutableList()))); .collect(toImmutableList()); Iterable<Type> queryTypes = transform(queryScope.getRelationType().getVisibleFields(), Field::getType); "Table: [" + Joiner.on(", ").join(tableTypes) + "], " + "Query: [" + Joiner.on(", ").join(queryTypes) + "]");
public static final List<TestColumn> TEST_COLUMNS = ImmutableList.<TestColumn>builder() .add(new TestColumn("p_empty_string", javaStringObjectInspector, "", Slices.EMPTY_SLICE, true)) .add(new TestColumn("p_string", javaStringObjectInspector, "test", Slices.utf8Slice("test"), true)) .add(new TestColumn("p_empty_varchar", javaHiveVarcharObjectInspector, "", Slices.EMPTY_SLICE, true)) ImmutableMap.of("test", "test"), ImmutableMap.of((byte) 1, (byte) 1), ImmutableMap.of(new HiveVarchar("test", HiveVarchar.MAX_VARCHAR_LENGTH), new HiveVarchar("test", HiveVarchar.MAX_VARCHAR_LENGTH)), .add(new TestColumn("t_array_empty", getStandardListObjectInspector(javaStringObjectInspector), ImmutableList.of(), arrayBlockOf(createUnboundedVarcharType()))) .add(new TestColumn("t_array_string", getStandardListObjectInspector(javaStringObjectInspector), ImmutableList.of("test"), arrayBlockOf(createUnboundedVarcharType(), "test"))) testColumns = ImmutableList.copyOf(filter(testColumns, not(TestColumn::isPartitionKey))); .map(TestColumn::getType) .map(HiveType::valueOf) .map(type -> type.getType(TYPE_MANAGER)) tableProperties.setProperty("columns", Joiner.on(',').join(transform(testColumns, TestColumn::getName))); tableProperties.setProperty("columns.types", Joiner.on(',').join(transform(testColumns, TestColumn::getType)));
/**
 * Builds a {@link ConnectorPageSource} that reads the given output file back
 * through the standard Hive page-source machinery, using the storage format
 * configured on {@code config}.
 */
private static ConnectorPageSource createPageSource(HiveTransactionHandle transaction, HiveClientConfig config, File outputFile)
{
    List<HiveColumnHandle> columns = getColumnHandles();

    // Hive serde properties describe the schema as comma-separated column
    // names and comma-separated type names.
    String columnNames = Joiner.on(',').join(columns.stream()
            .map(HiveColumnHandle::getName)
            .collect(toList()));
    String columnTypes = Joiner.on(',').join(columns.stream()
            .map(HiveColumnHandle::getHiveType)
            .map(hiveType -> hiveType.getHiveTypeName().toString())
            .collect(toList()));

    Properties splitProperties = new Properties();
    splitProperties.setProperty(FILE_INPUT_FORMAT, config.getHiveStorageFormat().getInputFormat());
    splitProperties.setProperty(SERIALIZATION_LIB, config.getHiveStorageFormat().getSerDe());
    splitProperties.setProperty("columns", columnNames);
    splitProperties.setProperty("columns.types", columnTypes);

    long fileLength = outputFile.length();
    HiveSplit split = new HiveSplit(
            SCHEMA_NAME,
            TABLE_NAME,
            "",
            "file:///" + outputFile.getAbsolutePath(),
            0,
            fileLength,
            fileLength,
            splitProperties,
            ImmutableList.of(),
            ImmutableList.of(),
            OptionalInt.empty(),
            false,
            TupleDomain.all(),
            ImmutableMap.of(),
            Optional.empty(),
            false);

    HivePageSourceProvider provider = new HivePageSourceProvider(
            config,
            createTestHdfsEnvironment(config),
            getDefaultHiveRecordCursorProvider(config),
            getDefaultHiveDataStreamFactories(config),
            TYPE_MANAGER);
    return provider.createPageSource(transaction, getSession(config), split, ImmutableList.copyOf(columns));
}
@Override protected Node visitShowFunctions(ShowFunctions node, Void context) ImmutableList.Builder<Expression> rows = ImmutableList.builder(); for (SqlFunction function : metadata.listFunctions()) { rows.add(row( new StringLiteral(function.getSignature().getName()), new StringLiteral(function.getSignature().getReturnType().toString()), new StringLiteral(Joiner.on(", ").join(function.getSignature().getArgumentTypes())), new StringLiteral(getFunctionType(function)), function.isDeterministic() ? TRUE_LITERAL : FALSE_LITERAL, Map<String, String> columns = ImmutableMap.<String, String>builder() .put("function_name", "Function") .put("return_type", "Return Type") .map(entry -> aliasedName(entry.getKey(), entry.getValue())) .collect(toImmutableList())), aliased(new Values(rows.build()), "functions", ImmutableList.copyOf(columns.keySet())), ordering( new SortItem(
StageStats stageStats = stageInfo.get().getStageStats(); double avgPositionsPerTask = stageInfo.get().getTasks().stream().mapToLong(task -> task.getStats().getProcessedInputPositions()).average().orElse(Double.NaN); double squaredDifferences = stageInfo.get().getTasks().stream().mapToDouble(task -> Math.pow(task.getStats().getProcessedInputPositions() - avgPositionsPerTask, 2)).sum(); double sdAmongTasks = Math.sqrt(squaredDifferences / stageInfo.get().getTasks().size()); builder.append(indentString(1)) .append(format("Output layout: [%s]\n", Joiner.on(", ").join(partitioningScheme.getOutputLayout()))); .map(argument -> { if (argument.isConstant()) { NullableValue constant = argument.getConstant(); .collect(toImmutableList()); builder.append(indentString(1)); if (replicateNullsAndAny) { builder.append(format("Output partitioning: %s (replicate nulls and any) [%s]%s\n", partitioningScheme.getPartitioning().getHandle(), Joiner.on(", ").join(arguments), formatHash(partitioningScheme.getHashColumn()))); builder.append(format("Output partitioning: %s [%s]%s\n", partitioningScheme.getPartitioning().getHandle(), Joiner.on(", ").join(arguments), formatHash(partitioningScheme.getHashColumn())));
ImmutableMap.<String, Object>builder() .put("tags", Joiner.on(", ").join(pattern.tags)) .put("severity", pattern.severity) .put("providesFix", pattern.providesFix.displayInfo()) .put("className", pattern.className) .put("summary", pattern.summary.trim()) .put("altNames", Joiner.on(", ").join(pattern.altNames)) .put("explanation", pattern.explanation.trim()); ImmutableMap.<String, String>builder() .put("title", pattern.name) .put("summary", pattern.summary) .put("layout", "bugpattern") .put("tags", Joiner.on(", ").join(pattern.tags)) .put("severity", pattern.severity.toString()) .put("providesFix", pattern.providesFix.toString()) .map(new PathToExampleInfo(pattern.className)) .sorted(Comparator.comparing(ExampleInfo::name)) .collect(Collectors.toList()); writer.write("\n----------\n\n"); if (!positiveExamples.isEmpty()) { writer.write("### Positive examples\n"); for (ExampleInfo positiveExample : positiveExamples) {
/**
 * Formats the body of a GROUP BY clause: each grouping element is rendered
 * in SQL syntax and the results are comma-joined.
 */
static String formatGroupBy(List<GroupingElement> groupingElements, Optional<List<Expression>> parameters)
{
    ImmutableList.Builder<String> parts = ImmutableList.builder();
    for (GroupingElement element : groupingElements) {
        String rendered = "";
        if (element instanceof SimpleGroupBy) {
            List<Expression> columns = ((SimpleGroupBy) element).getExpressions();
            // A single column prints bare; multiple columns print as a grouping set.
            rendered = (columns.size() == 1)
                    ? formatExpression(getOnlyElement(columns), parameters)
                    : formatGroupingSet(columns, parameters);
        }
        else if (element instanceof GroupingSets) {
            rendered = format("GROUPING SETS (%s)", Joiner.on(", ").join(
                    ((GroupingSets) element).getSets().stream()
                            .map(set -> formatGroupingSet(set, parameters))
                            .iterator()));
        }
        else if (element instanceof Cube) {
            rendered = format("CUBE %s", formatGroupingSet(((Cube) element).getExpressions(), parameters));
        }
        else if (element instanceof Rollup) {
            rendered = format("ROLLUP %s", formatGroupingSet(((Rollup) element).getExpressions(), parameters));
        }
        parts.add(rendered);
    }
    return Joiner.on(", ").join(parts.build());
}
/**
 * Builds the human-readable pause report: the approximate pause length plus,
 * for each GC bean present both before and after the sleep, any collections
 * that ran during the pause.
 *
 * @param extraSleepTime observed pause beyond the expected sleep, in milliseconds
 * @param gcTimesAfterSleep per-GC-bean counters sampled after the sleep
 * @param gcTimesBeforeSleep per-GC-bean counters sampled before the sleep
 * @return the formatted warning message
 */
private String formatMessage(long extraSleepTime, Map<String, GcTimes> gcTimesAfterSleep, Map<String, GcTimes> gcTimesBeforeSleep) {
    // Only beans visible in both samples can be diffed.
    Set<String> gcBeanNames = Sets.intersection(
        gcTimesAfterSleep.keySet(), gcTimesBeforeSleep.keySet());
    List<String> gcDiffs = Lists.newArrayList();
    for (String name : gcBeanNames) {
        GcTimes diff = gcTimesAfterSleep.get(name).subtract(gcTimesBeforeSleep.get(name));
        if (diff.gcCount != 0) {
            // String concatenation invokes toString() implicitly.
            gcDiffs.add("GC pool '" + name + "' had collection(s): " + diff);
        }
    }
    // Assemble with StringBuilder instead of repeated String += concatenation.
    StringBuilder message = new StringBuilder("Detected pause in JVM or host machine (eg GC): ")
        .append("pause of approximately ").append(extraSleepTime).append("ms\n");
    if (gcDiffs.isEmpty()) {
        message.append("No GCs detected");
    } else {
        message.append(Joiner.on("\n").join(gcDiffs));
    }
    return message.toString();
}
/**
 * Generate DDL for dropping partitions of a table.
 * <p>
 * ALTER TABLE finalTableName DROP IF EXISTS PARTITION partition_spec, PARTITION partition_spec, ...;
 * </p>
 * @param dbName database containing <code>finalTableName</code>; a USE statement for it is emitted first
 * @param finalTableName Table name where partitions are dropped
 * @param partitionDMLInfos list of Partition to be dropped
 * @return DDL to drop partitions in <code>finalTableName</code>, or an empty list when there is nothing to drop
 */
public static List<String> generateDropPartitionsDDL(final String dbName, final String finalTableName,
    final List<Map<String, String>> partitionDMLInfos) {
  if (partitionDMLInfos.isEmpty()) {
    return Collections.emptyList();
  }
  List<String> ddls = Lists.newArrayList();
  // Switch to the target database before issuing the ALTER TABLE.
  ddls.add(String.format("USE %s %n", dbName));
  // Join the partition specs
  ddls.add(String.format("ALTER TABLE %s DROP IF EXISTS %s", finalTableName,
      Joiner.on(",").join(Iterables.transform(partitionDMLInfos, PARTITION_SPEC_GENERATOR))));
  return ddls;
}
/**
 * Renders an {@code ARRAY[...]} constructor by formatting each element
 * expression and comma-joining them between the brackets.
 */
@Override
protected String visitArrayConstructor(ArrayConstructor node, Void context)
{
    StringBuilder formatted = new StringBuilder("ARRAY[");
    boolean first = true;
    for (Expression element : node.getValues()) {
        if (!first) {
            formatted.append(",");
        }
        formatted.append(formatSql(element, parameters));
        first = false;
    }
    return formatted.append("]").toString();
}
final Set<String> workerNodeIds = Sets.newHashSet( workerConfig.getAutoScaler().ipToIdLookup( Lists.newArrayList( Iterables.transform( runner.getLazyWorkers(), new Function<Worker, String>() laziestWorkerIps.size(), excessWorkers, Joiner.on(", ").join(laziestWorkerIps) ); .terminate(ImmutableList.copyOf(laziestWorkerIps)); if (terminated != null) { currentlyTerminating.addAll(terminated.getNodeIds());
/**
 * Assembles the lookup key for an instance from its coordinates; the
 * datacenter segment is appended only when it is a non-empty string.
 */
private String assembleInstanceKey(String appId, String cluster, String ip, String datacenter) {
    String base = STRING_JOINER.join(appId, cluster, ip);
    // Joining the already-joined prefix with the extra segment uses the same
    // separator, so the result matches building the full list up front.
    return Strings.isNullOrEmpty(datacenter) ? base : STRING_JOINER.join(base, datacenter);
}
throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null."); String parameterizedHost = Joiner.on(", ").join("{accountName}", accountName, "{adlaCatalogDnsSuffix}", this.client.adlaCatalogDnsSuffix()); return service.deleteAllSecrets(databaseName, this.client.apiVersion(), this.client.acceptLanguage(), parameterizedHost, this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() { @Override public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
/**
 * Like {@code inputs.toString()}, but with the nonsense {@code toString}
 * representations replaced with the name of each future from
 * {@link #allFutures}.
 */
String smartToString(ImmutableSet<ListenableFuture<String>> inputs) {
    return Joiner.on(", ").join(Iterables.transform(inputs, nameGetter));
}