public HivePartitionVersionFinder(FileSystem fs, State state, List<String> patterns) {
  this.fs = fs;
  this.state = new State(state);
  this.patterns = patterns;
}
/**
 * Get the final state for this object. By default this returns an empty
 * {@link org.apache.gobblin.configuration.State}, but concrete subclasses can add
 * information that will be added to the task state.
 * @return an empty {@link org.apache.gobblin.configuration.State}.
 */
@Override
public State getFinalState() {
  return new State();
}
public CleanableHivePartitionDataset(Partition partition, FileSystem fs, State state) {
  super(partition);
  this.fs = fs;
  this.state = new State(state);
}
@SuppressWarnings("unchecked") public T withProps(State props) { this.props = new State(props.getProperties()); return (T) this; }
/**
 * Get an instance. The location of the master password file is provided via "encrypt.key.loc".
 */
public static PasswordManager getInstance(Properties props) {
  return getInstance(new State(props));
}
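// A minimal usage sketch for the factory above, assuming a master password file already
// exists at the given location; the path and the encrypted value are illustrative.
Properties props = new Properties();
props.setProperty(ConfigurationKeys.ENCRYPT_KEY_LOC, "/path/to/master.password"); // assumed path
PasswordManager passwordManager = PasswordManager.getInstance(props);
String plainPassword = passwordManager.readPassword("ENC(someEncryptedValue)");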
@Override
public State getFinalState() {
  State state = new State();
  if (this.writer instanceof FinalState) {
    state.addAll(((FinalState) this.writer).getFinalState());
  } else {
    LOG.warn("Wrapped writer does not implement FinalState: " + this.writer.getClass());
  }
  return state;
}
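// A hedged sketch of a wrapped writer that exposes its own final state, so the
// instanceof check above picks it up instead of logging the warning;
// RecordCountingWriter is a hypothetical name for illustration.
public class RecordCountingWriter implements FinalState {
  private long recordsWritten;

  @Override
  public State getFinalState() {
    State state = new State();
    state.setProp("RecordsWritten", this.recordsWritten);
    return state;
  }
}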
public ComplianceValidationJob(Properties properties) {
  super(properties);
  initDatasetFinder(properties);
  try {
    ProxyUtils.cancelTokens(new State(properties));
  } catch (IOException | TException | InterruptedException e) {
    Throwables.propagate(e);
  }
}
public HiveRegistrationCompactorListener(Properties properties) {
  State state = new State(properties);
  this.hiveRegister = HiveRegister.get(state);
  this.hiveRegistrationPolicy = HiveRegistrationPolicyBase.getPolicy(state);
}
private List<HivePartitionVersion> findVersions(String name, String urn) throws IOException {
  State state = new State(this.state);
  Preconditions.checkArgument(this.state.contains(ComplianceConfigurationKeys.HIVE_VERSIONS_WHITELIST),
      "Missing required property " + ComplianceConfigurationKeys.HIVE_VERSIONS_WHITELIST);
  state.setProp(ComplianceConfigurationKeys.HIVE_DATASET_WHITELIST,
      this.state.getProp(ComplianceConfigurationKeys.HIVE_VERSIONS_WHITELIST));
  setVersions(name, state);
  log.info("Found " + this.versions.size() + " versions for the dataset " + urn);
  return this.versions;
}
public MetricContext getDefaultMetricContext() {
  org.apache.gobblin.configuration.State fakeState =
      new org.apache.gobblin.configuration.State(getSysConfig().getConfigAsProperties());
  List<Tag<?>> tags = new ArrayList<>();
  tags.add(new Tag<>(StandardMetrics.INSTANCE_NAME_TAG, getInstanceName()));
  return Instrumented.getMetricContext(fakeState, StandardGobblinInstanceDriver.class, tags);
}
@Override
public State getFinalState() {
  State state = new State();
  state.setProp("RecordsWritten", recordsWritten());
  try {
    state.setProp("BytesWritten", bytesWritten());
  } catch (Exception exception) {
    // If the writer fails to return bytesWritten, it might not be implemented, or implemented
    // incorrectly. Omit the property instead of failing.
  }
  return state;
}
@Test(enabled = false)
public void testMasterPasswordNotExist() {
  String password = "ENC(" + UUID.randomUUID().toString() + ")";
  State state = new State();
  // Point the key location at a path that does not exist; readPassword should then
  // return the input unchanged instead of decrypting it.
  state.setProp(ConfigurationKeys.ENCRYPT_KEY_LOC, UUID.randomUUID());
  Assert.assertEquals(PasswordManager.getInstance(state).readPassword(password), password);
}
private State buildDefaultState(int numBranches) throws IOException {
  State state = new State();
  state.setProp(ConfigurationKeys.FORK_BRANCHES_KEY, numBranches);

  File tmpLocation = File.createTempFile("metadata", "");
  tmpLocation.delete(); // only the generated path is needed, not the file itself
  state.setProp(ConfigurationKeys.DATA_PUBLISHER_METADATA_OUTPUT_DIR, tmpLocation.getParent());
  state.setProp(ConfigurationKeys.DATA_PUBLISHER_METADATA_OUTPUT_FILE, tmpLocation.getName());
  return state;
}
/**
 * Create a {@link WorkUnitState} with a {@link org.apache.gobblin.broker.iface.SharedResourcesBroker}
 * for running unit tests of constructs.
 */
public static WorkUnitState createTestWorkUnitState() {
  return new WorkUnitState(new WorkUnit(), new State(),
      SharedResourcesBrokerFactory.createDefaultTopLevelBroker(ConfigFactory.empty(),
          GobblinScopeTypes.GLOBAL.defaultScopeInstance())
          .newSubscopedBuilder(new JobScopeInstance("jobName", "testJob"))
          .build());
}
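// A minimal usage sketch: the broker-backed WorkUnitState above can be configured like
// any other state before it is handed to the construct under test. The property value
// here is an illustrative assumption.
WorkUnitState workUnitState = createTestWorkUnitState();
workUnitState.setProp(ConfigurationKeys.WRITER_OUTPUT_FORMAT_KEY, "AVRO");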
@Test
public void retryTest() throws IOException {
  DataWriter<Void> writer = mock(DataWriter.class);
  doThrow(new RuntimeException()).when(writer).writeEnvelope(any(RecordEnvelope.class));

  DataWriterWrapperBuilder<Void> builder = new DataWriterWrapperBuilder<>(writer, new State());
  DataWriter<Void> retryWriter = builder.build();
  try {
    retryWriter.writeEnvelope(new RecordEnvelope<>(null));
    Assert.fail("Should have failed.");
  } catch (Exception e) {
    // expected: the underlying writer always throws
  }
  verify(writer, times(5)).writeEnvelope(any(RecordEnvelope.class));
}
private static void executeDropTableQuery(HiveDataset hiveDataset, Properties properties) throws IOException {
  String dbName = hiveDataset.getTable().getDbName();
  String tableName = hiveDataset.getTable().getTableName();
  Optional<String> datasetOwner = Optional.fromNullable(hiveDataset.getTable().getOwner());
  try (HiveProxyQueryExecutor hiveProxyQueryExecutor =
      ProxyUtils.getQueryExecutor(new State(properties), datasetOwner)) {
    hiveProxyQueryExecutor.executeQuery(HivePurgerQueryTemplate.getDropTableQuery(dbName, tableName), datasetOwner);
  } catch (SQLException e) {
    throw new IOException(e);
  }
}
private DataWriter<Void> setup(DataWriter<Void> writer, int parallelism, int rate, ThrottleType type)
    throws IOException {
  State state = new State();
  state.appendToSetProp(ThrottleWriter.WRITER_LIMIT_RATE_LIMIT_KEY, Integer.toString(rate * parallelism));
  state.appendToSetProp(ThrottleWriter.WRITER_THROTTLE_TYPE_KEY, type.name());
  state.appendToSetProp(ConfigurationKeys.TASK_EXECUTOR_THREADPOOL_SIZE_KEY, Integer.toString(parallelism));
  state.appendToSetProp(ConfigurationKeys.SOURCE_MAX_NUMBER_OF_PARTITIONS, Integer.toString(parallelism));

  DataWriterWrapperBuilder<Void> builder = new DataWriterWrapperBuilder<>(writer, state);
  return builder.build();
}
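// A hedged usage sketch of the setup(...) helper above: wrap a mock writer so that four
// parallel writers share a combined limit of 100 records/sec. The QPS throttle type is
// an assumption about the ThrottleType enum, not confirmed by this snippet.
DataWriter<Void> writer = mock(DataWriter.class);
DataWriter<Void> throttledWriter = setup(writer, 4, 25, ThrottleType.QPS);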
@Test
public void testSchemaUrlWithTempFile() throws IOException {
  final String SCHEMA_FILE_NAME = "test_temp.avsc";
  State state = new State();
  state.setProp(HiveAvroSerDeManager.SCHEMA_LITERAL_LENGTH_LIMIT, "10");
  state.setProp(HiveAvroSerDeManager.USE_SCHEMA_TEMP_FILE, "true");
  state.setProp(HiveAvroSerDeManager.SCHEMA_FILE_NAME, SCHEMA_FILE_NAME);

  validateSchemaUrl(state, SCHEMA_FILE_NAME, false);
}
private WorkUnitState getMockWorkUnitState(Long lowWaterMark, Long highWaterMark) {
  WorkUnit mockWorkUnit = WorkUnit.createEmpty();
  mockWorkUnit.setWatermarkInterval(new WatermarkInterval(
      new MultiLongWatermark(Lists.newArrayList(lowWaterMark)),
      new MultiLongWatermark(Lists.newArrayList(highWaterMark))));

  WorkUnitState mockWorkUnitState = new WorkUnitState(mockWorkUnit, new State());
  mockWorkUnitState.setProp(KafkaSource.TOPIC_NAME, TEST_TOPIC_NAME);
  mockWorkUnitState.setProp(KafkaSource.PARTITION_ID, "1");
  mockWorkUnitState.setProp(ConfigurationKeys.KAFKA_BROKERS, "localhost:8080");
  mockWorkUnitState.setProp(KafkaSchemaRegistry.KAFKA_SCHEMA_REGISTRY_URL, TEST_URL);
  return mockWorkUnitState;
}