/**
 * Returns the transaction id of the currently running compactor transaction.
 *
 * <p>The compactor's txn id is recovered from the serialized valid-transaction snapshot
 * stored in the job configuration: the snapshot's high watermark is, by construction,
 * the id of the current (compactor) transaction.
 *
 * @param jobConf job configuration expected to carry {@code ValidTxnList.VALID_TXNS_KEY}
 * @return the high watermark of the txn snapshot, i.e. the compactor's txn id
 * @throws IllegalStateException if no txn snapshot is present in {@code jobConf}
 */
private static long getCompactorTxnId(Configuration jobConf) {
  String snapshot = jobConf.get(ValidTxnList.VALID_TXNS_KEY);
  if(Strings.isNullOrEmpty(snapshot)) {
    // FINAL_LOCATION is included so the failure can be tied to the table/partition being written.
    throw new IllegalStateException(ValidTxnList.VALID_TXNS_KEY + " not found for writing to "
        + jobConf.get(FINAL_LOCATION));
  }
  ValidTxnList validTxnList = new ValidReadTxnList();
  validTxnList.readFromString(snapshot);
  //this is id of the current (compactor) txn
  return validTxnList.getHighWatermark();
}
// NOTE(review): the following signature is truncated in this view — the rest of
// getWriter's parameter list and body continue beyond this chunk.
private void getWriter(Reporter reporter, ObjectInspector inspector,
while (!initialized && currentLocationIndex < locations.length) { String address = locations[currentLocationIndex++]; if (Strings.isNullOrEmpty(address)) { throw new IOException("can not fetch results from empty or null host value");
+ "CMVAS or CTAS statement"); segmentGranularity = Strings.isNullOrEmpty(segmentGranularity) ? HiveConf .getVar(parseCtx.getConf(), HiveConf.ConfVars.HIVE_DRUID_INDEXING_GRANULARITY
/**
 * Associates {@code value} with {@code key} in the in-memory metadata map,
 * replacing any previous mapping for that key.
 *
 * @param key   non-empty metadata key
 * @param value metadata value to store
 * @throws IllegalArgumentException if {@code key} is null or empty
 * @throws MetadataException declared by the interface contract
 */
@Override
public void set(@NotEmpty final String key, @NonNull final StringValue value)
    throws MetadataException {
  // Validate the key before mutating the backing map.
  final boolean keyIsUsable = !Strings.isNullOrEmpty(key);
  Preconditions.checkArgument(keyIsUsable);
  this.metadataMap.put(key, value);
}
/**
 * Looks up the metadata value stored under {@code key}.
 *
 * @param key non-empty metadata key
 * @return the stored value, or {@link Optional#absent()} if the key is unmapped
 * @throws IllegalArgumentException if {@code key} is null or empty
 * @throws MetadataException declared by the interface contract
 */
@Override
public Optional<StringValue> get(@NotEmpty final String key) throws MetadataException {
  Preconditions.checkArgument(!Strings.isNullOrEmpty(key));
  // containsKey is checked first so an unmapped key yields absent() rather than
  // Optional.of(null) blowing up.
  if (this.metadataMap.containsKey(key)) {
    return Optional.of(this.metadataMap.get(key));
  }
  return Optional.absent();
}
/**
 * Builds a parsed created_by version triple.
 *
 * <p>{@code version} and {@code appBuildHash} are normalized to {@code null} when empty.
 * Semantic-version parsing is best-effort: any parse failure simply leaves
 * {@code semver} null and {@code hasSemver} false.
 *
 * @param application  application name; must be non-null and non-empty
 * @param version      raw version string, may be null/empty
 * @param appBuildHash build hash, may be null/empty
 * @throws IllegalArgumentException if {@code application} is null or empty
 */
public ParsedVersion(String application, String version, String appBuildHash) {
  checkArgument(!Strings.isNullOrEmpty(application), "application cannot be null or empty");
  this.application = application;
  this.version = Strings.isNullOrEmpty(version) ? null : version;
  this.appBuildHash = Strings.isNullOrEmpty(appBuildHash) ? null : appBuildHash;
  SemanticVersion sv;
  boolean hasSemver;
  try {
    sv = SemanticVersion.parse(version);
    hasSemver = true;
  } catch (RuntimeException | SemanticVersionParseException e) {
    // Multi-catch replaces two catch blocks with identical bodies:
    // unparseable versions are tolerated, semver is simply unavailable.
    sv = null;
    hasSemver = false;
  }
  this.semver = sv;
  this.hasSemver = hasSemver;
}
/**
 * Builds a parsed created_by version triple.
 *
 * <p>{@code version} and {@code appBuildHash} are normalized to {@code null} when empty.
 * Semantic-version parsing is best-effort: any parse failure simply leaves
 * {@code semver} null and {@code hasSemver} false.
 *
 * @param application  application name; must be non-null and non-empty
 * @param version      raw version string, may be null/empty
 * @param appBuildHash build hash, may be null/empty
 * @throws IllegalArgumentException if {@code application} is null or empty
 */
public ParsedVersion(String application, String version, String appBuildHash) {
  checkArgument(!Strings.isNullOrEmpty(application), "application cannot be null or empty");
  this.application = application;
  this.version = Strings.isNullOrEmpty(version) ? null : version;
  this.appBuildHash = Strings.isNullOrEmpty(appBuildHash) ? null : appBuildHash;
  SemanticVersion sv;
  boolean hasSemver;
  try {
    sv = SemanticVersion.parse(version);
    hasSemver = true;
  } catch (RuntimeException | SemanticVersionParseException e) {
    // Multi-catch replaces two catch blocks with identical bodies:
    // unparseable versions are tolerated, semver is simply unavailable.
    sv = null;
    hasSemver = false;
  }
  this.semver = sv;
  this.hasSemver = hasSemver;
}
/**
 * Resolves the configured thrift column projection filter.
 *
 * <p>Exactly one of the deprecated and strict filter keys may be set:
 * neither set returns {@code null} (no projection), both set is an error,
 * the deprecated key alone logs a warning and uses the legacy filter.
 *
 * @param conf job configuration holding the filter keys
 * @return the configured filter, or {@code null} when no filter is configured
 * @throws ThriftProjectionException if both filter keys are present
 */
public static FieldProjectionFilter getFieldProjectionFilter(Configuration conf) {
  String deprecated = conf.get(THRIFT_COLUMN_FILTER_KEY);
  String strict = conf.get(STRICT_THRIFT_COLUMN_FILTER_KEY);
  final boolean hasDeprecated = !Strings.isNullOrEmpty(deprecated);
  final boolean hasStrict = !Strings.isNullOrEmpty(strict);
  if (!hasDeprecated && !hasStrict) {
    // No projection requested at all.
    return null;
  }
  if (hasDeprecated && hasStrict) {
    throw new ThriftProjectionException(
        "You cannot provide both "
            + THRIFT_COLUMN_FILTER_KEY
            + " and "
            + STRICT_THRIFT_COLUMN_FILTER_KEY
            +"! "
            + THRIFT_COLUMN_FILTER_KEY
            + " is deprecated."
    );
  }
  if (hasDeprecated) {
    LOG.warn("Using {} is deprecated. Please see the docs for {}!",
        THRIFT_COLUMN_FILTER_KEY, STRICT_THRIFT_COLUMN_FILTER_KEY);
    return new DeprecatedFieldProjectionFilter(deprecated);
  }
  return StrictFieldProjectionFilter.fromSemicolonDelimitedString(strict);
}
/**
 * JSON deserialization constructor for a {@code Space}.
 *
 * <p>All fields are copied verbatim; only {@code name} is validated.
 *
 * @param id           space id; null for spaces not yet persisted
 * @param name         space name; must be non-empty
 * @param description  free-form description
 * @param version      version tag
 * @param contents     namespace tree contained in this space
 * @param datasetCount number of datasets in the space
 * @param ctime        creation time, may be null
 * @throws IllegalArgumentException if {@code name} is null or empty
 */
@JsonCreator
public Space(
    @JsonProperty("id") @DefaultValue("null") String id, // default is null for new spaces
    @JsonProperty("name") String name,
    @JsonProperty("description") String description,
    @JsonProperty("version") String version,
    @JsonProperty("contents") NamespaceTree contents,
    @JsonProperty("datasetCount") int datasetCount,
    @JsonProperty("ctime") Long ctime
) {
  // A space is unusable without a name — fail fast before assigning anything.
  checkArgument(!isNullOrEmpty(name), "space name can not be empty");
  this.name = name;
  this.id = id;
  this.description = description;
  this.version = version;
  this.contents = contents;
  this.datasetCount = datasetCount;
  this.ctime = ctime;
}
/**
 * Normalizes the given path eliminating repeated forward slashes.
 *
 * @param path path to normalize; must not be null
 * @return normalized path (empty input is returned as-is)
 * @throws NullPointerException if {@code path} is null
 */
public static final String normalize(final String path) {
  // Objects.requireNonNull is behavior-identical to Guava's checkNotNull here
  // (both throw NPE on null) and keeps this utility stdlib-only.
  java.util.Objects.requireNonNull(path);
  if (path.isEmpty()) {
    return path;
  }
  final StringBuilder builder = new StringBuilder();
  char last = path.charAt(0);
  builder.append(last);
  for (int i = 1; i < path.length(); i++) {
    final char cur = path.charAt(i);
    // Collapse runs of '/' into a single slash; all other characters pass through.
    if (last == '/' && cur == last) {
      continue;
    }
    builder.append(cur);
    last = cur;
  }
  return builder.toString();
}
/**
 * Parses a parquet created_by string into its application/version/build-hash parts.
 *
 * @param createdBy raw created_by value from the file footer
 * @return the parsed version triple
 * @throws VersionParseException if the string does not match {@code FORMAT}
 *         or yields an empty application name
 */
public static ParsedVersion parse(String createdBy) throws VersionParseException {
  Matcher matcher = PATTERN.matcher(createdBy);
  // Guard clause: reject anything that doesn't match the expected layout.
  if (!matcher.matches()) {
    throw new VersionParseException(
        "Could not parse created_by: " + createdBy + " using format: " + FORMAT);
  }
  final String application = matcher.group(1);
  if (Strings.isNullOrEmpty(application)) {
    throw new VersionParseException("application cannot be null or empty");
  }
  // group(2) = semver candidate, group(3) = build hash; both may be absent.
  return new ParsedVersion(application, matcher.group(2), matcher.group(3));
}
/**
 * Returns a normalized, combined path out of the given path segments.
 *
 * <p>Null-or-empty segments are skipped; remaining segments are joined with '/'
 * and the result is run through {@link #normalize(String)}.
 *
 * @param parts path segments to combine; must not contain null
 * @return the joined, normalized path (empty string if no non-empty parts)
 * @throws NullPointerException if any segment is null
 * @see #normalize(String)
 */
public static final String join(final String... parts) {
  final StringBuilder sb = new StringBuilder();
  for (final String part : parts) {
    // requireNonNull matches Guava checkNotNull: NPE with the same message.
    java.util.Objects.requireNonNull(part, "parts cannot contain null");
    if (!part.isEmpty()) {
      // Separator-before-element avoids the append-then-deleteCharAt dance.
      if (sb.length() > 0) {
        sb.append('/');
      }
      sb.append(part);
    }
  }
  return normalize(sb.toString());
}
/**
 * Parses a parquet created_by string into its application/version/build-hash parts.
 *
 * @param createdBy raw created_by value from the file footer
 * @return the parsed version triple
 * @throws VersionParseException if the string does not match {@code FORMAT}
 *         or yields an empty application name
 */
public static ParsedVersion parse(String createdBy) throws VersionParseException {
  Matcher matcher = PATTERN.matcher(createdBy);
  // Guard clause: reject anything that doesn't match the expected layout.
  if (!matcher.matches()) {
    throw new VersionParseException(
        "Could not parse created_by: " + createdBy + " using format: " + FORMAT);
  }
  final String application = matcher.group(1);
  if (Strings.isNullOrEmpty(application)) {
    throw new VersionParseException("application cannot be null or empty");
  }
  // group(2) = semver candidate, group(3) = build hash; both may be absent.
  return new ParsedVersion(application, matcher.group(2), matcher.group(3));
}
/**
 * Builds a {@link HiveConf} pointed at the given metastore.
 *
 * @param hiveMetastoreURI metastore URI; must be non-empty
 * @return a fresh HiveConf with {@code METASTOREURIS} set
 * @throws IllegalArgumentException if {@code hiveMetastoreURI} is null or empty
 */
private static HiveConf getHiveConf(String hiveMetastoreURI) {
  checkArgument(!Strings.isNullOrEmpty(hiveMetastoreURI),
      "hiveMetastoreURI cannot be null or empty");
  final HiveConf conf = new HiveConf();
  conf.setVar(HiveConf.ConfVars.METASTOREURIS, hiveMetastoreURI);
  return conf;
}
/**
 * Decides whether a file must be read sequentially for the given encoding.
 *
 * <p>Only {@code DELTA_BYTE_ARRAY} is affected. An empty or unparseable
 * created_by string is treated conservatively as requiring sequential reads
 * (see PARQUET-246).
 *
 * @param createdBy created_by string from the file footer, may be null/empty
 * @param encoding  column encoding under consideration
 * @return true if sequential reads are required
 */
public static boolean requiresSequentialReads(String createdBy, Encoding encoding) {
  if (encoding != Encoding.DELTA_BYTE_ARRAY) {
    return false;
  }
  if (Strings.isNullOrEmpty(createdBy)) {
    LOG.info("Requiring sequential reads because file version is empty. See PARQUET-246");
    return true;
  }
  try {
    return requiresSequentialReads(VersionParser.parse(createdBy), encoding);
  } catch (RuntimeException | VersionParser.VersionParseException e) {
    // Multi-catch collapses two identical catch blocks: any parse failure
    // falls back to the safe (sequential) answer.
    warnParseError(createdBy, e);
    return true;
  }
}
/**
 * Decides whether a file must be read sequentially for the given encoding.
 *
 * <p>Only {@code DELTA_BYTE_ARRAY} is affected. An empty or unparseable
 * created_by string is treated conservatively as requiring sequential reads
 * (see PARQUET-246).
 *
 * @param createdBy created_by string from the file footer, may be null/empty
 * @param encoding  column encoding under consideration
 * @return true if sequential reads are required
 */
public static boolean requiresSequentialReads(String createdBy, Encoding encoding) {
  if (encoding != Encoding.DELTA_BYTE_ARRAY) {
    return false;
  }
  if (Strings.isNullOrEmpty(createdBy)) {
    LOG.info("Requiring sequential reads because file version is empty. See PARQUET-246");
    return true;
  }
  try {
    return requiresSequentialReads(VersionParser.parse(createdBy), encoding);
  } catch (RuntimeException | VersionParser.VersionParseException e) {
    // Multi-catch collapses two identical catch blocks: any parse failure
    // falls back to the safe (sequential) answer.
    warnParseError(createdBy, e);
    return true;
  }
}
if (Strings.isNullOrEmpty(newUri)) { newUri = req.getContextPath(); if (Strings.isNullOrEmpty(newUri)) { newUri = WebServerConstants.WEBSERVER_ROOT_PATH;
/**
 * Reads serialized checkpoint metadata from the latest completed Hoodie commit.
 *
 * <p>Initializes the Hoodie dataset if needed, then walks the active commit
 * timeline: if a completed instant exists and its commit metadata carries
 * {@code HOODIE_METADATA_KEY}, that value is deserialized into a map.
 * Otherwise an empty map is returned.
 *
 * @param hoodieConf Hoodie configuration providing filesystem conf and base path
 * @return deserialized metadata map, or an empty map when absent
 * @throws JobRuntimeException wrapping any {@link IOException} from the
 *         filesystem or timeline reads
 */
private static Map<String, String> readMetadataInfo(
    @NonNull final HoodieConfiguration hoodieConf) {
  try {
    final FileSystem fs = FSUtils.getFs(hoodieConf.getConf());
    // Ensures the dataset's metadata folder exists before opening a meta client.
    HoodieUtil.initHoodieDataset(fs, hoodieConf);
    final HoodieTableMetaClient hoodieTableMetaClient =
        new HoodieTableMetaClient(new HadoopConfiguration(hoodieConf.getConf()).getHadoopConf(),
            hoodieConf.getBasePath(), true);
    final HoodieActiveTimeline hoodieActiveTimeline = hoodieTableMetaClient.getActiveTimeline();
    // Only completed commits are candidates; take the most recent one.
    final java.util.Optional<HoodieInstant> lastInstant = hoodieActiveTimeline.getCommitTimeline()
        .filterCompletedInstants().lastInstant();
    if (lastInstant.isPresent()) {
      log.info("using hoodie instant for reading checkpoint info :{}",
          lastInstant.get().getTimestamp());
      final HoodieCommitMetadata commitMetadata =
          HoodieCommitMetadata.fromBytes(hoodieActiveTimeline.getInstantDetails(lastInstant.get()).get());
      final String serCommitInfo = commitMetadata.getMetadata(HOODIE_METADATA_KEY);
      if (!Strings.isNullOrEmpty(serCommitInfo)) {
        // NOTE(review): MapUtil.deserializeMap presumably reverses MapUtil's own
        // serialization format — confirm against the writer side.
        return MapUtil.deserializeMap(serCommitInfo);
      }
    }
    // No completed commit, or no metadata entry under our key.
    return new HashMap<>();
  } catch (IOException e) {
    log.error("failed to read metadata info", e);
    throw new JobRuntimeException("failed to read metadata information", e);
  }
}
// Closes the enclosing class (its declaration is outside this chunk).
}
if (Strings.isNullOrEmpty(createdBy)) { if (Strings.isNullOrEmpty(version.version)) { warnOnce("Ignoring statistics because created_by did not contain a semver (see PARQUET-251): " + createdBy); return true;
if (Strings.isNullOrEmpty(createdBy)) { if (Strings.isNullOrEmpty(version.version)) { warnOnce("Ignoring statistics because created_by did not contain a semver (see PARQUET-251): " + createdBy); return true;