/**
 * Merges the given job configuration properties into this instance's job properties.
 *
 * @param jobProps a {@link State} carrying the job properties to add
 */
public void setJobProps(State jobProps) {
  this.jobProps.addAll(jobProps);
}
/**
 * Deep copy constructor: copies all properties of the given {@link Extract}
 * into this new instance.
 *
 * @param extract the other {@link Extract} instance to copy from
 */
public Extract(Extract extract) {
  super.addAll(extract.getProperties());
}
/**
 * Stores a defensive copy of the given {@link State} in this builder.
 *
 * @param state the {@link State} whose properties are copied into the builder
 * @return this {@code Builder} instance for chaining
 */
Builder withState(State state) {
  State snapshot = new State();
  snapshot.addAll(state);
  this.state = snapshot;
  return this;
}
/**
 * Constructor.
 *
 * <p>Copies the given properties into this state, records an immutable snapshot of
 * each previous {@link WorkUnitState}, and leaves the previous dataset-state map empty.
 *
 * @param properties job configuration properties
 * @param previousWorkUnitStates an {@link Iterable} of {@link WorkUnitState}s of the previous job run
 */
public SourceState(State properties, Iterable<WorkUnitState> previousWorkUnitStates) {
  super.addAll(properties);
  this.previousDatasetStatesByUrns = ImmutableMap.of();
  for (WorkUnitState previous : previousWorkUnitStates) {
    this.previousWorkUnitStates.add(new ImmutableWorkUnitState(previous));
  }
}
/**
 * Populates this instance with properties of the other instance.
 *
 * <p>Only common properties present in {@code otherState} but absent here are merged
 * (via {@link Maps#difference}), followed by all of the other instance's specific properties.
 *
 * @param otherState the other {@link State} instance
 */
public void addAll(State otherState) {
  // Common properties that exist only on the other side.
  Properties newCommonProps = new Properties();
  newCommonProps.putAll(
      Maps.difference(this.commonProperties, otherState.commonProperties).entriesOnlyOnRight());
  addAll(newCommonProps);
  addAll(otherState.specProperties);
}
/**
 * Copy constructor.
 *
 * <p>Note: the {@link Extract} of {@code other} is shared by reference rather than copied.
 *
 * @param other the other {@link WorkUnit} instance
 *
 * @deprecated Use {@link #copyOf(WorkUnit)}
 */
@Deprecated
public WorkUnit(WorkUnit other) {
  super.addAll(other);
  this.extract = other.getExtract();
}
/**
 * Constructs a {@link HiveRegistrationUnit} from the given {@link Builder}.
 *
 * <p>Copies the builder's column list and the three property groups (general, storage,
 * SerDe) into this instance, then derives the table/partition, storage, and SerDe
 * fields from those copied properties.
 *
 * @param builder the builder supplying the database name, table name, columns and properties
 * @throws IllegalArgumentException if the builder's database or table name is null or empty
 */
HiveRegistrationUnit(Builder<?> builder) {
  // Fail fast with a descriptive message instead of a bare IllegalArgumentException.
  Preconditions.checkArgument(!Strings.isNullOrEmpty(builder.dbName),
      "Database name must not be null or empty");
  Preconditions.checkArgument(!Strings.isNullOrEmpty(builder.tableName),
      "Table name must not be null or empty");

  this.dbName = builder.dbName;
  this.tableName = builder.tableName;
  this.columns.addAll(builder.columns);
  this.props.addAll(builder.props);
  this.storageProps.addAll(builder.storageProps);
  this.serDeProps.addAll(builder.serDeProps);
  this.serDeManager = builder.serDeManager;

  // Derive structured fields from the copied property groups.
  populateTablePartitionFields(this.props);
  populateStorageFields(this.storageProps);
  populateSerDeFields(this.serDeProps);
}
/**
 * Constructor.
 *
 * @param state a {@link SourceState} the properties of which will be copied into this {@link WorkUnit} instance
 * @param extract an {@link Extract}
 *
 * @deprecated Properties in {@link SourceState} should not be added to a {@link WorkUnit}. Having each
 * {@link WorkUnit} contain a copy of {@link SourceState} is a waste of memory. Use {@link #create(Extract)}.
 */
@Deprecated
public WorkUnit(SourceState state, Extract extract) {
  // Values should only be null for deserialization
  if (state != null) {
    super.addAll(state);
  }
  // Fall back to an empty Extract when none is supplied.
  this.extract = (extract != null) ? extract : new Extract(null, null, null, null);
}
/**
 * Constructor.
 *
 * <p>Copies the given job properties into this state, snapshots the previous
 * dataset states into an immutable map, and wraps each previous
 * {@link WorkUnitState} in an immutable view.
 *
 * @param properties job configuration properties
 * @param previousDatasetStatesByUrns {@link SourceState} of the previous job run
 * @param previousWorkUnitStates an {@link Iterable} of {@link WorkUnitState}s of the previous job run
 */
public SourceState(State properties, Map<String, ? extends SourceState> previousDatasetStatesByUrns,
    Iterable<WorkUnitState> previousWorkUnitStates) {
  super.addAll(properties.getProperties());
  this.previousDatasetStatesByUrns = ImmutableMap.copyOf(previousDatasetStatesByUrns);
  for (WorkUnitState previous : previousWorkUnitStates) {
    this.previousWorkUnitStates.add(new ImmutableWorkUnitState(previous));
  }
}
datasetSpecificConfigMap.get(dataset).addAll(StateUtils.jsonObjectToState(object, DATASET)); } else { datasetSpecificConfigMap.put(dataset, StateUtils.jsonObjectToState(object, DATASET));
super.addAll(state); super.setProp(ConfigurationKeys.EXTRACT_TABLE_TYPE_KEY, type.toString()); super.setProp(ConfigurationKeys.EXTRACT_NAMESPACE_NAME_KEY, namespace); Extract previousExtract = pre.getWorkunit().getExtract(); if (previousExtract.getNamespace().equals(namespace) && previousExtract.getTable().equals(table)) { this.previousTableState.addAll(pre);
jobProps.addAll(this.state); jobProps.setProp(MRCompactor.COMPACTION_ENABLE_SUCCESS_FILE, false); jobProps.setProp(MRCompactor.COMPACTION_INPUT_DEDUPLICATED, this.inputDeduplicated);
/**
 * Constructor.
 *
 * <p>Initialization order matters here: the {@link State} snapshot of {@code props} is
 * built first because nearly every subsequent step ({@code getConfFromState},
 * {@code getTmpOutputDir}, {@code getFileSystem}, metrics, the completeness verifier)
 * reads from {@code this.state}.
 *
 * @param props compaction job configuration properties, copied into this compactor's {@link State}
 * @param tags metric tags associated with this compactor run
 * @param compactorListener optional listener notified about compaction events
 * @throws IOException presumably from filesystem access / dataset discovery — TODO confirm
 */
public MRCompactor(Properties props, List<? extends Tag<?>> tags, Optional<CompactorListener> compactorListener)
    throws IOException {
  // Snapshot the job properties so all helpers below read a stable copy.
  this.state = new State();
  this.state.addAll(props);
  this.initilizeTime = getCurrentTime(); // NOTE(review): field name typo ("initilize") kept — declared elsewhere.
  this.tags = tags;
  this.conf = HadoopUtils.getConfFromState(this.state);
  this.tmpOutputDir = getTmpOutputDir();
  this.fs = getFileSystem();
  // Discover the distinct datasets this run will compact.
  this.datasets = getDatasetsFinder().findDistinctDatasets();
  this.jobExecutor = createJobExecutor();
  this.jobRunnables = Maps.newConcurrentMap();
  // Closer collects closeable resources (e.g. the completeness verifier below).
  this.closer = Closer.create();
  this.stopwatch = Stopwatch.createStarted();
  this.gobblinMetrics = initializeMetrics();
  // Event submitter is scoped to this job's metric context under the compaction namespace.
  this.eventSubmitter = new EventSubmitter.Builder(
      GobblinMetrics.get(this.state.getProp(ConfigurationKeys.JOB_NAME_KEY)).getMetricContext(),
      MRCompactor.COMPACTION_TRACKING_EVENTS_NAMESPACE).build();
  this.compactorListener = compactorListener;
  this.dataVerifTimeoutMinutes = getDataVerifTimeoutMinutes();
  this.compactionTimeoutMinutes = getCompactionTimeoutMinutes();
  this.shouldVerifDataCompl = shouldVerifyDataCompleteness();
  this.compactionCompleteListener = getCompactionCompleteListener();
  // Only construct (and register for closing) a verifier when completeness checks are enabled.
  this.verifier =
      this.shouldVerifDataCompl ? Optional.of(this.closer.register(new DataCompletenessVerifier(this.state)))
          : Optional.<DataCompletenessVerifier> absent();
  this.shouldPublishDataIfCannotVerifyCompl = shouldPublishDataIfCannotVerifyCompl();
}
jobPropsState.addAll(jobProps); this.jobState = new JobState(jobPropsState, this.datasetStateStore.getLatestDatasetStatesByUrns(this.jobName), this.jobName,