/**
 * Get the value of a property as a short, using the given default value if the property is not set.
 *
 * @param key property key
 * @param def default value
 * @param radix radix used to parse the value
 * @return short value associated with the key or the default value if the property is not set
 */
public short getPropAsShortWithRadix(String key, short def, int radix) {
  return contains(key) ? Short.parseShort(getProp(key), radix) : def;
}
/**
 * Returns the compaction destination base directory from the given state.
 *
 * @throws IllegalArgumentException if {@link MRCompactor#COMPACTION_DEST_DIR} is not set
 */
private String getDstBaseDir(State state) {
  String destDirKey = MRCompactor.COMPACTION_DEST_DIR;
  Preconditions.checkArgument(state.contains(destDirKey), "Missing required property " + destDirKey);
  return state.getProp(destDirKey);
}
/**
 * Creates the {@link Path} used as the {@link ConfigurationKeys#WRITER_FILE_PATH} value, derived
 * from {@link ConfigurationKeys#EXTRACT_TABLE_NAME_KEY}.
 *
 * @param state state expected to contain {@link ConfigurationKeys#EXTRACT_TABLE_NAME_KEY}
 * @return a {@link Path} built from the extract table name
 * @throws IllegalArgumentException if the table name property is missing
 */
public static Path getTableNameWriterFilePath(State state) {
  // Include the offending key in the message, consistent with the other preconditions in this codebase.
  Preconditions.checkArgument(state.contains(ConfigurationKeys.EXTRACT_TABLE_NAME_KEY),
      "Missing required property " + ConfigurationKeys.EXTRACT_TABLE_NAME_KEY);
  return new Path(state.getProp(ConfigurationKeys.EXTRACT_TABLE_NAME_KEY));
}
/**
 * Loads the configured data-completeness verification runner class.
 *
 * @return the {@link AbstractRunner} subclass named by {@code COMPACTION_COMPLETENESS_VERIFICATION_CLASS}
 * @throws IllegalArgumentException if the property is not set
 */
@SuppressWarnings("unchecked")
private Class<? extends AbstractRunner> getRunnerClass() {
  Preconditions.checkArgument(this.props.contains(COMPACTION_COMPLETENESS_VERIFICATION_CLASS),
      "Missing required property " + COMPACTION_COMPLETENESS_VERIFICATION_CLASS);
  try {
    String runnerClassName = this.props.getProp(COMPACTION_COMPLETENESS_VERIFICATION_CLASS);
    return (Class<? extends AbstractRunner>) Class.forName(runnerClassName);
  } catch (Throwable t) {
    LOG.error("Failed to get data completeness verification class", t);
    throw Throwables.propagate(t);
  }
}
/**
 * Returns the compaction input base directory from the given state.
 *
 * @throws IllegalArgumentException if {@link MRCompactor#COMPACTION_INPUT_DIR} is not set
 */
private String getSrcBaseDir(State state) {
  String inputDirKey = MRCompactor.COMPACTION_INPUT_DIR;
  Preconditions.checkArgument(state.contains(inputDirKey), "Missing required property " + inputDirKey);
  return state.getProp(inputDirKey);
}
/**
 * Returns the {@link FileSystem} for compaction, honoring an optional URI override
 * configured under {@code COMPACTION_FILE_SYSTEM_URI}.
 */
private FileSystem getFileSystem() throws IOException {
  if (!this.state.contains(COMPACTION_FILE_SYSTEM_URI)) {
    return FileSystem.get(this.conf);
  }
  URI fsUri = URI.create(this.state.getProp(COMPACTION_FILE_SYSTEM_URI));
  return FileSystem.get(fsUri, this.conf);
}
/**
 * Returns the compaction input directory from this object's state.
 *
 * @throws IllegalArgumentException if {@link MRCompactor#COMPACTION_INPUT_DIR} is not set
 */
private String getInputDir() {
  String inputDirKey = MRCompactor.COMPACTION_INPUT_DIR;
  Preconditions.checkArgument(this.state.contains(inputDirKey), "Missing required property " + inputDirKey);
  return this.state.getProp(inputDirKey);
}
/**
 * Returns the compaction destination directory from this object's state.
 *
 * @throws IllegalArgumentException if {@link MRCompactor#COMPACTION_DEST_DIR} is not set
 */
private String getDestDir() {
  String destDirKey = MRCompactor.COMPACTION_DEST_DIR;
  Preconditions.checkArgument(this.state.contains(destDirKey), "Missing required property " + destDirKey);
  return this.state.getProp(destDirKey);
}
/**
 * Returns the compaction input sub-directory from the given state.
 *
 * @throws IllegalArgumentException if {@link MRCompactor#COMPACTION_INPUT_SUBDIR} is not set
 */
private String getSrcSubDir(State state) {
  String subDirKey = MRCompactor.COMPACTION_INPUT_SUBDIR;
  Preconditions.checkArgument(state.contains(subDirKey), "Missing required property " + subDirKey);
  return state.getProp(subDirKey);
}
/**
 * Builds the policy, compiling the optional snapshot path pattern when
 * {@code SNAPSHOT_PATH_PATTERN} is configured.
 */
protected HiveSnapshotRegistrationPolicy(State props) throws IOException {
  super(props);
  if (props.contains(SNAPSHOT_PATH_PATTERN)) {
    Pattern compiled = Pattern.compile(props.getProp(SNAPSHOT_PATH_PATTERN));
    this.snapshotPathPattern = Optional.of(compiled);
  } else {
    this.snapshotPathPattern = Optional.<Pattern> absent();
  }
}
/**
 * Delete dependency jars from HDFS when job is done.
 */
private void deleteDependencyJars() throws IllegalArgumentException, IOException {
  if (!this.state.contains(COMPACTION_JARS)) {
    return;
  }
  Path jarsDir = new Path(this.state.getProp(COMPACTION_JARS));
  this.fs.delete(jarsDir, true);
}
/**
 * Refer to {@link MRCompactorAvroKeyDedupJobRunner#getDedupKeyOption()}.
 *
 * <p>Resolves the configured dedup key option, falling back to the default when the property
 * is unset or does not name a valid {@code DedupKeyOption} constant.
 */
private MRCompactorAvroKeyDedupJobRunner.DedupKeyOption getDedupKeyOption() {
  if (!this.state.contains(MRCompactorAvroKeyDedupJobRunner.COMPACTION_JOB_DEDUP_KEY)) {
    return MRCompactorAvroKeyDedupJobRunner.DEFAULT_DEDUP_KEY_OPTION;
  }
  // Locale-independent upper-casing: enum constant names are ASCII, and the default locale
  // (e.g. Turkish, where 'i' -> 'İ') would silently break the enum lookup.
  String configured = this.state.getProp(MRCompactorAvroKeyDedupJobRunner.COMPACTION_JOB_DEDUP_KEY)
      .toUpperCase(java.util.Locale.ROOT);
  Optional<MRCompactorAvroKeyDedupJobRunner.DedupKeyOption> option =
      Enums.getIfPresent(MRCompactorAvroKeyDedupJobRunner.DedupKeyOption.class, configured);
  return option.isPresent() ? option.get() : MRCompactorAvroKeyDedupJobRunner.DEFAULT_DEDUP_KEY_OPTION;
}
/**
 * Adds every jar found under {@link MRCompactor#COMPACTION_JARS} to the job's
 * distributed-cache classpath. No-op when the property is not set.
 */
private void addJars(Configuration conf) throws IOException {
  if (!state.contains(MRCompactor.COMPACTION_JARS)) {
    return;
  }
  Path jarDir = new Path(state.getProp(MRCompactor.COMPACTION_JARS));
  FileStatus[] jarFiles = this.fs.listStatus(jarDir);
  for (FileStatus jarFile : jarFiles) {
    DistributedCache.addFileToClassPath(jarFile.getPath(), conf, this.fs);
  }
}
/**
 * Returns the {@link FileSystem} for the datasets finder, honoring an optional URI override
 * configured under {@link MRCompactor#COMPACTION_FILE_SYSTEM_URI}.
 *
 * @throws RuntimeException wrapping any {@link IOException} from filesystem creation
 */
private static FileSystem getFileSystem(State state) {
  try {
    if (!state.contains(MRCompactor.COMPACTION_FILE_SYSTEM_URI)) {
      return FileSystem.get(HadoopUtils.getConfFromState(state));
    }
    URI fsUri = URI.create(state.getProp(MRCompactor.COMPACTION_FILE_SYSTEM_URI));
    return FileSystem.get(fsUri, HadoopUtils.getConfFromState(state));
  } catch (IOException e) {
    throw new RuntimeException("Failed to get filesystem for datasetsFinder.", e);
  }
}
/**
 * Returns the source {@link FileSystem}, honoring an optional URI override configured
 * under {@code HIVE_SOURCE_FS_URI}.
 */
private static FileSystem getSourceFs(State state) throws IOException {
  if (!state.contains(HIVE_SOURCE_FS_URI)) {
    return FileSystem.get(HadoopUtils.getConfFromState(state));
  }
  URI sourceUri = URI.create(state.getProp(HIVE_SOURCE_FS_URI));
  return FileSystem.get(sourceUri, HadoopUtils.getConfFromState(state));
}
/**
 * Resolves the configured dedup key option from the dataset's job properties, falling back to
 * {@code DEFAULT_DEDUP_KEY_OPTION} when the property is unset or does not name a valid
 * {@code DedupKeyOption} constant.
 */
private DedupKeyOption getDedupKeyOption() {
  if (!this.dataset.jobProps().contains(COMPACTION_JOB_DEDUP_KEY)) {
    return DEFAULT_DEDUP_KEY_OPTION;
  }
  // Locale-independent upper-casing: enum constant names are ASCII, and the default locale
  // (e.g. Turkish, where 'i' -> 'İ') would silently break the enum lookup.
  String configured =
      this.dataset.jobProps().getProp(COMPACTION_JOB_DEDUP_KEY).toUpperCase(java.util.Locale.ROOT);
  Optional<DedupKeyOption> option = Enums.getIfPresent(DedupKeyOption.class, configured);
  return option.isPresent() ? option.get() : DEFAULT_DEDUP_KEY_OPTION;
}
/**
 * Copies the Avro schema SerDe property from {@code newUnit} to {@code existingUnit},
 * preferring {@code SCHEMA_LITERAL} over {@code SCHEMA_URL} when both are present.
 *
 * @throws IllegalArgumentException if {@code newUnit} carries neither schema property
 */
@Override
public void updateSchema(HiveRegistrationUnit existingUnit, HiveRegistrationUnit newUnit) throws IOException {
  // Fail with an explanatory message rather than a bare IllegalArgumentException,
  // consistent with the precondition style used elsewhere in this codebase.
  Preconditions.checkArgument(
      newUnit.getSerDeProps().contains(SCHEMA_LITERAL) || newUnit.getSerDeProps().contains(SCHEMA_URL),
      "SerDe properties of the new unit must contain either %s or %s", SCHEMA_LITERAL, SCHEMA_URL);
  if (newUnit.getSerDeProps().contains(SCHEMA_LITERAL)) {
    existingUnit.setSerDeProp(SCHEMA_LITERAL, newUnit.getSerDeProps().getProp(SCHEMA_LITERAL));
  } else {
    existingUnit.setSerDeProp(SCHEMA_URL, newUnit.getSerDeProps().getProp(SCHEMA_URL));
  }
}
/**
 * Calls {@link #getOptionallyThrottledFileSystem(FileSystem, int)} parsing the qps from the input
 * {@link State} at key {@link #MAX_FILESYSTEM_QPS}. Returns {@code fs} unchanged when no qps limit
 * is configured.
 *
 * @throws IOException
 */
public static FileSystem getOptionallyThrottledFileSystem(FileSystem fs, State state) throws IOException {
  // Migrate any deprecated key spellings before checking for the canonical key.
  DeprecationUtils.renameDeprecatedKeys(state, MAX_FILESYSTEM_QPS, DEPRECATED_KEYS);
  if (!state.contains(MAX_FILESYSTEM_QPS)) {
    return fs;
  }
  int maxQps = state.getPropAsInt(MAX_FILESYSTEM_QPS);
  return getOptionallyThrottledFileSystem(fs, maxQps);
}
private void setCompactionSLATimestamp(Dataset dataset) { // Set up SLA timestamp only if this dataset will be compacted and MRCompactor.COMPACTION_INPUT_PATH_TIME is present. if ((this.recompactFromOutputPaths || !MRCompactor.datasetAlreadyCompacted(this.fs, dataset, this.renameSourceDirEnabled)) && dataset.jobProps().contains(MRCompactor.COMPACTION_INPUT_PATH_TIME)) { long timeInMills = dataset.jobProps().getPropAsLong(MRCompactor.COMPACTION_INPUT_PATH_TIME); // Set the upstream time to partition + 1 day. E.g. for 2015/10/13 the upstream time is midnight of 2015/10/14 CompactionSlaEventHelper.setUpstreamTimeStamp(this.state, timeInMills + TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS)); } }
/**
 * Acquires delegation tokens for the filesystem(s) and the JobTracker into {@code cred}.
 *
 * @param state job state; may list additional namenodes under {@code OTHER_NAMENODES}
 * @param conf Hadoop configuration used for token fetching
 * @param cred credentials object populated with the fetched tokens
 * @throws IOException if any token fetch fails
 */
private static void getFsAndJtTokensImpl(final State state, final Configuration conf, final Credentials cred)
    throws IOException {
  // Token for the default HDFS namenode.
  getHdfsToken(conf, cred);
  // Tokens for any additional namenodes are optional and only fetched when configured.
  if (state.contains(OTHER_NAMENODES)) {
    getOtherNamenodesToken(state.getPropAsList(OTHER_NAMENODES), conf, cred);
  }
  // JobTracker token last.
  getJtToken(cred);
}