/**
 * Creates a TokenizedPattern from the same tokens that make up
 * this path.
 *
 * @return a pattern built from this path's already-split tokens
 */
public TokenizedPattern toPattern() {
    // Hand the pre-tokenized form straight over so the pattern does not
    // need to re-split the string representation.
    final TokenizedPattern asPattern = new TokenizedPattern(path, tokenizedPath);
    return asPattern;
}
/**
 * Parses a comma-separated list of Ant-style globs into tokenized patterns.
 * A glob ending in {@code /} is treated as "that directory and everything
 * below it" by appending {@link SelectorUtils#DEEP_TREE_MATCH}.
 *
 * @param patts comma-separated patterns, or {@code null} for none
 * @return the parsed patterns; empty when {@code patts} is {@code null}
 */
private List<TokenizedPattern> patterns(String patts) {
    final List<TokenizedPattern> parsed = new ArrayList<>();
    if (patts == null) {
        return parsed;
    }
    for (String glob : patts.split(",")) {
        String candidate = glob;
        if (candidate.endsWith("/")) {
            // Trailing slash means the directory plus all of its contents.
            candidate += SelectorUtils.DEEP_TREE_MATCH;
        }
        // Ant matchers work with platform separators, not '/'.
        parsed.add(new TokenizedPattern(candidate.replace('/', File.separatorChar)));
    }
    return parsed;
}
/**
 * Returns a new pattern consisting of every token of this pattern
 * except the final one.
 *
 * @return the shortened pattern, or the shared empty pattern when only
 *         one token remains
 * @throws IllegalStateException if this pattern has no tokens at all
 */
public TokenizedPattern withoutLastToken() {
    final int count = tokenizedPattern.length;
    if (count == 0) {
        throw new IllegalStateException("can't strip a token from nothing");
    }
    if (count == 1) {
        return EMPTY_PATTERN;
    }
    // Truncate the string form at the point where the final token last
    // occurs, so string and token array stay in sync.
    final String lastToken = tokenizedPattern[count - 1];
    final int cut = pattern.lastIndexOf(lastToken);
    final String[] remaining = new String[count - 1];
    System.arraycopy(tokenizedPattern, 0, remaining, 0, count - 1);
    return new TokenizedPattern(pattern.substring(0, cut), remaining);
}
/**
 * Lists recursive files of this directory with pattern matching.
 * <p>The default implementation calls {@link #list()} recursively inside {@link #run} and applies filtering to the result.
 * Implementations may wish to override this more efficiently.
 * @param includes comma-separated Ant-style globs as per {@link Util#createFileSet(File, String, String)} using {@code /} as a path separator;
 * the empty string means <em>no matches</em> (use {@link SelectorUtils#DEEP_TREE_MATCH} if you want to match everything except some excludes)
 * @param excludes optional excludes in similar format to {@code includes}
 * @param useDefaultExcludes as per {@link AbstractFileSet#setDefaultexcludes}
 * @return a list of {@code /}-separated relative names of children (files directly inside or in subdirectories)
 * @throws IOException if this is not a directory, or listing was not possible for some other reason
 * @since 2.118
 */
@Restricted(Beta.class)
public @Nonnull Collection<String> list(@Nonnull String includes, @CheckForNull String excludes, boolean useDefaultExcludes) throws IOException {
    Collection<String> r = run(new CollectFiles(this));
    List<TokenizedPattern> includePatterns = patterns(includes);
    List<TokenizedPattern> excludePatterns = patterns(excludes);
    if (useDefaultExcludes) {
        for (String patt : DirectoryScanner.getDefaultExcludes()) {
            excludePatterns.add(new TokenizedPattern(patt.replace('/', File.separatorChar)));
        }
    }
    return r.stream().filter(p -> {
        // Collected names use '/', but Ant's matchers expect the platform
        // separator — normalize before matching.
        TokenizedPath path = new TokenizedPath(p.replace('/', File.separatorChar));
        // noneMatch is the idiomatic (and clearer) form of !anyMatch.
        return includePatterns.stream().anyMatch(patt -> patt.matchPath(path, true))
                && excludePatterns.stream().noneMatch(patt -> patt.matchPath(path, true));
    }).collect(Collectors.toSet());
}
private static final class CollectFiles extends MasterToSlaveCallable<Collection<String>, IOException> {
/**
 * Removes from a pattern all tokens to the right containing wildcards.
 *
 * @param input the input string
 * @return the leftmost part of the pattern without wildcards
 */
public static String rtrimWildcardTokens(String input) {
    // Tokenize first, then delegate the actual trimming to the pattern.
    final TokenizedPattern tokenized = new TokenizedPattern(input);
    return tokenized.rtrimWildcardTokens().toString();
}
}
/**
 * Add all patterns that are not real patterns (do not contain
 * wildcards) to the set and returns the real patterns.
 *
 * @param map Map to populate with the literal (wildcard-free) patterns,
 *            keyed by their (possibly upper-cased) string form.
 * @param patterns String[] of patterns.
 * @return the patterns that do contain wildcards, tokenized.
 * @since Ant 1.8.0
 */
private TokenizedPattern[] fillNonPatternSet(final Map<String, TokenizedPath> map, final String[] patterns) {
    final List<TokenizedPattern> al = new ArrayList<>(patterns.length);
    for (String pattern : patterns) {
        if (SelectorUtils.hasWildcards(pattern)) {
            al.add(new TokenizedPattern(pattern));
        } else {
            // Case-insensitive matching is implemented by normalizing to
            // upper case. NOTE(review): this uses the default locale, which
            // can misbehave for e.g. Turkish dotted/dotless I; left as-is to
            // stay consistent with the rest of the scanner — confirm.
            final String s = isCaseSensitive() ? pattern : pattern.toUpperCase();
            map.put(s, new TokenizedPath(s));
        }
    }
    // Zero-length array is the preferred toArray idiom; the collection
    // allocates a correctly sized array internally.
    return al.toArray(new TokenizedPattern[0]);
}