/**
 * Converts a separator-delimited string into a list of URIs.
 * Entries are whitespace-trimmed and blank entries are dropped before parsing.
 *
 * @throws ParameterException if {@code value} is null
 */
@Override
public List<URI> convertFrom(String value) {
    if (value == null) {
        throw new ParameterException("URI List must not be null.");
    }
    return StreamSupport
            .stream(Splitter.on(SEPARATOR)
                    .omitEmptyStrings()
                    .trimResults()
                    .split(value)
                    .spliterator(), false)
            .map(uri -> constructURIFromString(uri))
            .collect(Collectors.toList());
}
@JsonCreator public ResourceGroupIdTemplate(String fullId) { List<String> segments = Splitter.on(".").splitToList(requireNonNull(fullId, "fullId is null")); checkArgument(!segments.isEmpty(), "Resource group id is empty"); this.segments = segments.stream() .map(ResourceGroupNameTemplate::new) .collect(Collectors.toList()); }
/**
 * Parses a comma-separated node list into host addresses, ignoring blank
 * entries and trimming whitespace around each entry.
 */
public static ImmutableSet<HostAddress> parseNodes(String nodes)
{
    ImmutableSet.Builder<HostAddress> hosts = ImmutableSet.builder();
    for (String node : Splitter.on(',').omitEmptyStrings().trimResults().split(nodes)) {
        hosts.add(KafkaConnectorConfig.toHostAddress(node));
    }
    return hosts.build();
}
/**
 * Does the domain name match one of the "wildcard" patterns (e.g. {@code "*.ar"})? If a {@code
 * desiredType} is specified, the wildcard pattern must also match that type.
 */
private static boolean matchesWildcardSuffixType(
    Optional<PublicSuffixType> desiredType, String domain) {
  // Split off only the leftmost label; the remainder is the candidate wildcard suffix.
  List<String> pieces = DOT_SPLITTER.limit(2).splitToList(domain);
  if (pieces.size() != 2) {
    return false;
  }
  String suffix = pieces.get(1);
  return matchesType(
      desiredType, Optional.fromNullable(PublicSuffixPatterns.UNDER.get(suffix)));
}
/**
 * Flattens possibly-repeated session headers into a single list of values:
 * each header may itself hold a comma-separated list; blanks are dropped
 * and values are whitespace-trimmed.
 */
private static List<String> splitSessionHeader(Enumeration<String> headers)
{
    Splitter commaSplitter = Splitter.on(',').trimResults().omitEmptyStrings();
    return Collections.list(headers).stream()
            .flatMap(header -> commaSplitter.splitToList(header).stream())
            .collect(toImmutableList());
}
// NOTE(review): this line is garbled extraction residue, not a compilable unit.
// It interleaves fragments from several methods: stray `return Optional.empty();`
// statements, the start of file-column name/type parsing (Splitter over
// META_TABLE_COLUMNS and toHiveTypes over META_TABLE_COLUMN_TYPES), an orphaned
// `.mapToInt(...).toArray()` pipeline tail, and the head of an argument list that
// records Presto version and query id into an ImmutableMap. Presumably collapsed
// from a Hive writer/factory method — restore from the original file before
// editing; do not restructure from this view alone.
return Optional.empty(); return Optional.empty(); return Optional.empty(); List<String> fileColumnNames = Splitter.on(',').trimResults().omitEmptyStrings().splitToList(schema.getProperty(META_TABLE_COLUMNS, "")); List<Type> fileColumnTypes = toHiveTypes(schema.getProperty(META_TABLE_COLUMN_TYPES, "")).stream() .map(hiveType -> hiveType.getType(typeManager)) .collect(toList()); .mapToInt(inputColumnNames::indexOf) .toArray(); codecName, fileInputColumnIndexes, ImmutableMap.<String, String>builder() .put(HiveMetadata.PRESTO_VERSION_NAME, nodeVersion.toString()) .put(HiveMetadata.PRESTO_QUERY_ID_NAME, session.getQueryId())
// NOTE(review): collapsed/garbled method — the braces for the signature, the
// `if` bodies, and the column-building loop are missing from this view.
// Intent (from the visible code only): fail with TableNotFoundException when the
// table is absent or is a virtual view; otherwise build column metadata via
// columnMetadataGetter, then assemble table properties (external location for
// EXTERNAL_TABLE, partition columns, ORC bloom filter columns parsed with a
// comma Splitter) and an optional table comment into a ConnectorTableMetadata.
// Restore from the original file before editing; do not restructure from here.
private ConnectorTableMetadata getTableMetadata(SchemaTableName tableName) if (!table.isPresent() || table.get().getTableType().equals(TableType.VIRTUAL_VIEW.name())) { throw new TableNotFoundException(tableName); Function<HiveColumnHandle, ColumnMetadata> metadataGetter = columnMetadataGetter(table.get(), typeManager); ImmutableList.Builder<ColumnMetadata> columns = ImmutableList.builder(); for (HiveColumnHandle columnHandle : hiveColumnHandles(table.get())) { columns.add(metadataGetter.apply(columnHandle)); ImmutableMap.Builder<String, Object> properties = ImmutableMap.builder(); if (table.get().getTableType().equals(EXTERNAL_TABLE.name())) { properties.put(EXTERNAL_LOCATION_PROPERTY, table.get().getStorage().getLocation()); List<String> partitionedBy = table.get().getPartitionColumns().stream() .map(Column::getName) .collect(toList()); if (!partitionedBy.isEmpty()) { properties.put(PARTITIONED_BY_PROPERTY, partitionedBy); String orcBloomFilterColumns = table.get().getParameters().get(ORC_BLOOM_FILTER_COLUMNS_KEY); if (orcBloomFilterColumns != null) { properties.put(ORC_BLOOM_FILTER_COLUMNS, Splitter.on(',').trimResults().omitEmptyStrings().splitToList(orcBloomFilterColumns)); String orcBloomFilterFfp = table.get().getParameters().get(ORC_BLOOM_FILTER_FPP_KEY); Optional<String> comment = Optional.ofNullable(table.get().getParameters().get(TABLE_COMMENT)); return new ConnectorTableMetadata(tableName, columns.build(), properties.build(), comment);
/**
 * Resolves an imported file: first tries the file name relative to the previous
 * import's directory, then the bare file name, and loads the content of the
 * first candidate the resolver can map to a URI. Returns null when neither
 * candidate resolves.
 */
@Override
public Collection<Import> apply(final String url, final Import previous) {
  final String fileName = nameWithExtension(url);
  final List<String> pathSegments = Splitter.on('/').trimResults().omitEmptyStrings()
      .splitToList(previous.getAbsoluteUri().toString());
  // Directory of the previous import, joined back together with a leading slash.
  final String parentDir = pathSegments.subList(0, pathSegments.size() - 1).stream()
      .collect(Collectors.joining("/", "/", ""));
  return Arrays.asList(parentDir + fileName, fileName)
      .stream()
      .map(candidate -> resolver.apply(candidate))
      .filter(resolved -> resolved != null)
      .findFirst()
      .map(throwingFunction(resolved -> Try.with(resolved.toURL()::openStream)
          .apply(stream -> new String(ByteStreams.toByteArray(stream), StandardCharsets.UTF_8))
          .map(content -> Arrays.asList(new Import(resolved, resolved, content)))
          .get()
      ))
      .orElse(null);
}
// NOTE(review): collapsed/garbled method — the opening brace, the jobConf setup
// that the orphaned `.filter(...)/.forEach(...)` pipeline belongs to, and the
// closing braces are missing from this view. Intent (from the visible code only):
// keep the REGULAR columns and their Hive column indexes, copy "serialization.*"
// properties from the table schema into the JobConf, and prepend LzoCodec/LzopCodec
// to "io.compression.codecs" when LzoCodec is not already listed. Restore from the
// original file before editing; do not restructure from here.
public static RecordReader<?, ?> createRecordReader(Configuration configuration, Path path, long start, long length, Properties schema, List<HiveColumnHandle> columns) List<HiveColumnHandle> readColumns = ImmutableList.copyOf(filter(columns, column -> column.getColumnType() == REGULAR)); List<Integer> readHiveColumnIndexes = ImmutableList.copyOf(transform(readColumns, HiveColumnHandle::getHiveColumnIndex)); .filter(name -> name.startsWith("serialization.")) .forEach(name -> jobConf.set(name, schema.getProperty(name))); List<String> codecs = newArrayList(Splitter.on(",").trimResults().omitEmptyStrings().split(jobConf.get("io.compression.codecs", ""))); if (!codecs.contains(LzoCodec.class.getName())) { codecs.add(0, LzoCodec.class.getName()); codecs.add(0, LzopCodec.class.getName()); jobConf.set("io.compression.codecs", codecs.stream().collect(joining(",")));
/**
 * Returns properties grouped by component uuid, ordered from the project root
 * down to the lowest module (the order of the component's module uuid path).
 * When {@code key} is present only properties with that key are loaded.
 */
private Multimap<String, PropertyDto> loadComponentSettings(DbSession dbSession, Optional<String> key, ComponentDto component) {
  List<String> uuidPath = DOT_SPLITTER.splitToList(component.moduleUuidPath());
  List<ComponentDto> components = dbClient.componentDao().selectByUuids(dbSession, uuidPath);
  Set<Long> componentIds = components.stream().map(ComponentDto::getId).collect(Collectors.toSet());
  Map<Long, String> uuidById = components.stream().collect(Collectors.toMap(ComponentDto::getId, ComponentDto::uuid));

  List<PropertyDto> properties;
  if (key.isPresent()) {
    properties = dbClient.propertiesDao().selectPropertiesByKeysAndComponentIds(dbSession, Collections.singleton(key.get()), componentIds);
  } else {
    properties = dbClient.propertiesDao().selectPropertiesByComponentIds(dbSession, componentIds);
  }

  // Explicit ordering keeps groups in project-to-module order rather than lexical uuid order.
  Multimap<String, PropertyDto> propertiesByUuid = TreeMultimap.create(Ordering.explicit(uuidPath), Ordering.arbitrary());
  properties.forEach(property -> propertiesByUuid.put(uuidById.get(property.getResourceId()), property));
  return propertiesByUuid;
}
// NOTE(review): garbled fragment — the method signature is truncated (only the
// trailing parameters are visible) and the item-mapping lambda contains TWO
// consecutive `return` statements, which cannot compile; one of them (presumably
// the `" = "`-joined form, matching .properties output) is the intended line and
// the other is residue from a diff or merge. Also note `fileName` is computed but
// unused in the visible code. Restore from the original file before editing.
@PathVariable String clusterName, @PathVariable String namespaceName, HttpServletResponse res) { List<String> fileNameSplit = Splitter.on(".").splitToList(namespaceName); String fileName = fileNameSplit.size() <= 1 ? Joiner.on(".") .join(namespaceName, ConfigFileFormat.Properties.getValue()) : namespaceName; NamespaceBO namespaceBO = namespaceService.loadNamespaceBO(appId, Env.fromString (env), clusterName, namespaceName); List<String> fileItems = namespaceBO.getItems().stream().map(itemBO -> { String key = itemBO.getItem().getKey(); String value = itemBO.getItem().getValue(); return Joiner.on("").join(itemBO.getItem().getKey(), itemBO.getItem().getValue()); return Joiner.on(" = ").join(itemBO.getItem().getKey(), itemBO.getItem().getValue()); }).collect(Collectors.toList());
/**
 * Normalizes multi-line text: trims each line, drops lines that become empty,
 * and rejoins the remainder with {@code '\n'}.
 *
 * @param text the raw text, possibly null
 * @return the normalized text, or null when {@code text} is null
 *         (null-in/null-out is preserved for existing callers)
 */
static String yamlText(String text) {
    if (text == null) {
        return null;
    }
    // String.join accepts the Splitter's lazy Iterable directly — no need to
    // materialize an intermediate List and re-stream it through a joining collector.
    return String.join("\n",
        Splitter.on("\n")
            .trimResults()
            .omitEmptyStrings()
            .split(text));
} }
/**
 * Reads a comma-separated setting and returns its trimmed, non-empty values.
 *
 * @param settings configuration to read from
 * @param key setting key
 * @param defaultValue raw value used when the key is unset
 * @return mutable list of the individual values, empty when the value is blank
 */
private static List<String> propertyValues(Configuration settings, String key, String defaultValue) {
    String value = settings.get(key).orElse(defaultValue);
    // splitToList already yields a List — no need for the
    // StreamSupport.stream(iterable.spliterator(), false) dance over split().
    return Splitter.on(",").trimResults().omitEmptyStrings().splitToList(value)
        .stream()
        .collect(Collectors.toList());
} }
/**
 * Sets the spill directories from a comma-separated path list; entries are
 * whitespace-trimmed and blank entries are ignored.
 */
@Config(SPILLER_SPILL_PATH)
public FeaturesConfig setSpillerSpillPaths(String spillPaths)
{
    Iterable<String> rawPaths = Splitter.on(",").trimResults().omitEmptyStrings().split(spillPaths);
    this.spillerSpillPaths = ImmutableList.copyOf(rawPaths).stream()
            .map(path -> Paths.get(path))
            .collect(toImmutableList());
    return this;
}
@Config("jmx.dump-tables") public JmxConnectorConfig setDumpTables(String tableNames) { this.dumpTables = Splitter.on(Pattern.compile("(?<!\\\\),")) // match "," not preceded by "\" .omitEmptyStrings() .splitToList(tableNames) .stream() .map(part -> part.replace("\\,", ",")) // unescape all escaped commas .collect(Collectors.toSet()); return this; }
/**
 * Looks up releases by a delimited list of numeric release ids and returns
 * them as DTOs. Duplicate ids collapse into the lookup set.
 */
@GetMapping("/releases")
public List<ReleaseDTO> findReleaseByIds(@RequestParam("releaseIds") String releaseIds) {
  Set<Long> ids = RELEASES_SPLITTER.splitToList(releaseIds)
      .stream()
      .map(id -> Long.parseLong(id))
      .collect(Collectors.toSet());
  return BeanUtils.batchTransform(ReleaseDTO.class, releaseService.findByReleaseIds(ids));
}
/**
 * Gets the flag value for a comma-separated set of enums of the given type, wrapped in an {@link
 * Optional}, which is empty if the flag is unset. If the flag is explicitly set to empty, an
 * empty set will be returned.
 */
public <T extends Enum<T>> Optional<ImmutableSet<T>> getEnumSet(String key, Class<T> clazz) {
  return this.get(key)
      .map(
          raw ->
              Splitter.on(',').omitEmptyStrings().splitToList(raw).stream()
                  .map(name -> asEnumValue(key, name, clazz))
                  .collect(toImmutableSet()));
}
// NOTE(review): garbled test fragment — the hosts list literal is truncated
// (no closing parenthesis), the code between building the hosts and splitting
// `output` is missing, and the for-loop/assert bodies are unterminated. Intent
// (from the visible code only): split some command output on newlines and assert
// that line i+2 mentions hosts.get(i). The `+ 2` offset presumably skips two
// header lines of the output — confirm against the original test. Restore from
// the original file before editing.
@Test public void testRollingUpdate() throws Exception { final List<String> hosts = ImmutableList.of( "dc1-" + testHost() + "-a1.dc1.example.com", "dc1-" + testHost() + "-a2.dc1.example.com", final List<String> lines = Lists.newArrayList(Splitter.on("\n").split(output)); for (int i = 0; i < hosts.size(); i++) { assertThat(lines.get(i + 2), containsString(hosts.get(i)));