/**
 * Determines whether metrics collection and reporting are turned on.
 *
 * @param properties configuration properties to consult
 * @return {@code true} if metrics collection and reporting are enabled
 */
public static boolean isEnabled(Properties properties) {
  return PropertiesUtils.getPropAsBoolean(properties,
      ConfigurationKeys.METRICS_ENABLED_KEY,
      ConfigurationKeys.DEFAULT_METRICS_ENABLED);
}
/**
 * Creates a {@link SchedulerDaemon} from default properties merged with custom overrides.
 * NOTE(review): precedence between the two sets is decided by
 * {@code PropertiesUtils.combineProperties} — presumably custom wins; confirm there.
 *
 * @param defaultProperties base configuration
 * @param customProperties  properties layered on top of the defaults
 * @throws Exception if the delegate constructor fails to initialize
 */
private SchedulerDaemon(Properties defaultProperties, Properties customProperties) throws Exception {
  this(PropertiesUtils.combineProperties(defaultProperties, customProperties));
}
/**
 * Creates a {@link SchedulerService} from raw configuration properties.
 * Reads whether the service should wait for job completion on shutdown, and forwards
 * all Quartz-specific settings (keys prefixed with {@code "org.quartz."}) to the delegate
 * constructor.
 *
 * @param props configuration properties
 */
public SchedulerService(Properties props) {
  this(Boolean.parseBoolean(
      props.getProperty(ConfigurationKeys.SCHEDULER_WAIT_FOR_JOB_COMPLETION_KEY,
          ConfigurationKeys.DEFAULT_SCHEDULER_WAIT_FOR_JOB_COMPLETION)),
      Optional.of(PropertiesUtils.extractPropertiesWithPrefix(props, Optional.of("org.quartz."))));
}
/**
 * Constructs a {@link Deserializer}, using the value of {@link #KAFKA_DESERIALIZER_TYPE}.
 *
 * <p>If {@code deserializerType} is present, its predefined deserializer class is instantiated;
 * otherwise the class named by the {@link #KAFKA_DESERIALIZER_TYPE} property is loaded
 * reflectively. The resulting deserializer is configured (as a value deserializer) with the
 * given properties.
 *
 * @param props configuration properties, also passed to {@code Deserializer.configure}
 * @param deserializerType optional predefined deserializer type
 * @return a configured {@link Deserializer}
 * @throws ReflectiveOperationException if the deserializer class cannot be found or instantiated
 * @throws IllegalArgumentException if neither a predefined type nor the
 *         {@link #KAFKA_DESERIALIZER_TYPE} property is supplied
 */
private static Deserializer<?> getDeserializer(Properties props, Optional<Deserializers> deserializerType)
    throws ReflectiveOperationException {
  Deserializer<?> deserializer;
  if (deserializerType.isPresent()) {
    deserializer = ConstructorUtils.invokeConstructor(deserializerType.get().getDeserializerClass());
  } else {
    String deserializerClassName = props.getProperty(KAFKA_DESERIALIZER_TYPE);
    // Fail with a descriptive message instead of the opaque NPE Class.forName(null) would throw.
    if (deserializerClassName == null) {
      throw new IllegalArgumentException(
          "Missing required property " + KAFKA_DESERIALIZER_TYPE + " naming the deserializer class");
    }
    deserializer = Deserializer.class
        .cast(ConstructorUtils.invokeConstructor(Class.forName(deserializerClassName)));
  }
  deserializer.configure(PropertiesUtils.propsToStringKeyMap(props), false);
  return deserializer;
}
/**
 * Serializes a {@link FlowId} into a properties string holding the flow name and group.
 *
 * @param id flow identifier to serialize
 * @return the serialized properties string
 * @throws IOException if serialization fails
 */
public static String serializeFlowId(FlowId id) throws IOException {
  Properties flowIdProps = new Properties();
  flowIdProps.setProperty(FLOWCONFIG_ID_NAME, id.getFlowName());
  flowIdProps.setProperty(FLOWCONFIG_ID_GROUP, id.getFlowGroup());
  return PropertiesUtils.serialize(flowIdProps);
}
/**
 * Reconstructs a {@link FlowId} from a properties string produced by {@code serializeFlowId}.
 *
 * @param serialized serialized properties string
 * @return the deserialized flow identifier
 * @throws IOException if deserialization fails
 */
public static FlowId deserializeFlowId(String serialized) throws IOException {
  Properties flowIdProps = PropertiesUtils.deserialize(serialized);
  return new FlowId()
      .setFlowName(flowIdProps.getProperty(FLOWCONFIG_ID_NAME))
      .setFlowGroup(flowIdProps.getProperty(FLOWCONFIG_ID_GROUP));
}
// Cap the number of tasks Helix may run concurrently on a single instance; falls back to
// the cluster-wide default when the job configuration does not override it.
jobConfigBuilder.setNumConcurrentTasksPerInstance(PropertiesUtils.getPropAsInt(jobProps,
    GobblinClusterConfigurationKeys.HELIX_CLUSTER_TASK_CONCURRENCY,
    GobblinClusterConfigurationKeys.HELIX_CLUSTER_TASK_CONCURRENCY_DEFAULT));
/**
 * Verifies that a record serialized with {@code KafkaJsonSerializer} round-trips through a
 * {@code KafkaDeserializerExtractor} configured with the CONFLUENT_JSON deserializer.
 */
@Test
public void testConfluentJsonDeserializer() throws IOException {
  WorkUnitState state = getMockWorkUnitState(0L, 10L);
  state.setProp("json.value.type", KafkaRecord.class.getName());

  KafkaRecord expectedRecord = new KafkaRecord("Hello World");

  // Configure the JSON serializer/deserializer pair from the work-unit properties.
  Serializer<KafkaRecord> jsonSerializer = new KafkaJsonSerializer<>();
  jsonSerializer.configure(PropertiesUtils.propsToStringKeyMap(state.getProperties()), false);
  Deserializer<KafkaRecord> jsonDeserializer = new KafkaJsonDeserializer<>();
  jsonDeserializer.configure(PropertiesUtils.propsToStringKeyMap(state.getProperties()), false);

  ByteBuffer payload = ByteBuffer.wrap(jsonSerializer.serialize(TEST_TOPIC_NAME, expectedRecord));

  KafkaSchemaRegistry<?, ?> schemaRegistry = mock(KafkaSchemaRegistry.class);
  KafkaDeserializerExtractor extractor = new KafkaDeserializerExtractor(state,
      Optional.fromNullable(Deserializers.CONFLUENT_JSON), jsonDeserializer, schemaRegistry);

  ByteArrayBasedKafkaRecord message = getMockMessageAndOffset(payload);
  Assert.assertEquals(extractor.decodeRecord(message), expectedRecord);
}
/**
 * Serializes a {@link FlowConfig} into a properties string: the flow's own properties plus
 * bookkeeping keys for the id, optional schedule, and optional template URIs.
 *
 * @param flowConfig flow configuration to serialize
 * @return the serialized properties string
 * @throws IOException if serialization fails
 */
public static String serializeFlowConfig(FlowConfig flowConfig) throws IOException {
  Properties props = ConfigUtils.configToProperties(ConfigFactory.parseMap(flowConfig.getProperties()));
  props.setProperty(FLOWCONFIG_ID_NAME, flowConfig.getId().getFlowName());
  props.setProperty(FLOWCONFIG_ID_GROUP, flowConfig.getId().getFlowGroup());
  if (flowConfig.hasSchedule()) {
    Schedule schedule = flowConfig.getSchedule();
    props.setProperty(FLOWCONFIG_SCHEDULE_CRON, schedule.getCronSchedule());
    props.setProperty(FLOWCONFIG_SCHEDULE_RUN_IMMEDIATELY, Boolean.toString(schedule.isRunImmediately()));
  }
  if (flowConfig.hasTemplateUris()) {
    props.setProperty(FLOWCONFIG_TEMPLATEURIS, flowConfig.getTemplateUris());
  }
  return PropertiesUtils.serialize(props);
}
/**
 * Reconstructs a {@link FlowConfig} from a properties string produced by
 * {@code serializeFlowConfig}. The bookkeeping keys (id, schedule, template URIs) are read
 * first and then stripped, so only the user-facing flow properties remain in the result.
 *
 * @param serialized serialized properties string
 * @return the deserialized flow configuration
 * @throws IOException if deserialization fails
 */
public static FlowConfig deserializeFlowConfig(String serialized) throws IOException {
  Properties props = PropertiesUtils.deserialize(serialized);

  FlowId flowId = new FlowId()
      .setFlowName(props.getProperty(FLOWCONFIG_ID_NAME))
      .setFlowGroup(props.getProperty(FLOWCONFIG_ID_GROUP));
  FlowConfig flowConfig = new FlowConfig().setId(flowId);

  if (props.containsKey(FLOWCONFIG_SCHEDULE_CRON)) {
    flowConfig.setSchedule(new Schedule()
        .setCronSchedule(props.getProperty(FLOWCONFIG_SCHEDULE_CRON))
        .setRunImmediately(Boolean.valueOf(props.getProperty(FLOWCONFIG_SCHEDULE_RUN_IMMEDIATELY))));
  }
  if (props.containsKey(FLOWCONFIG_TEMPLATEURIS)) {
    flowConfig.setTemplateUris(props.getProperty(FLOWCONFIG_TEMPLATEURIS));
  }

  // Remove the bookkeeping keys before exposing the remaining properties to the caller.
  props.remove(FLOWCONFIG_ID_NAME);
  props.remove(FLOWCONFIG_ID_GROUP);
  props.remove(FLOWCONFIG_SCHEDULE_CRON);
  props.remove(FLOWCONFIG_SCHEDULE_RUN_IMMEDIATELY);
  props.remove(FLOWCONFIG_TEMPLATEURIS);
  flowConfig.setProperties(new StringMap(Maps.fromProperties(props)));
  return flowConfig;
}
}
// Cap the number of tasks Helix may run concurrently on a single instance; falls back to
// the cluster-wide default when the job configuration does not override it.
jobConfigBuilder.setNumConcurrentTasksPerInstance(PropertiesUtils.getPropAsInt(jobProps,
    GobblinClusterConfigurationKeys.HELIX_CLUSTER_TASK_CONCURRENCY,
    GobblinClusterConfigurationKeys.HELIX_CLUSTER_TASK_CONCURRENCY_DEFAULT));
/**
 * Whether job retriggering is enabled for this job's configuration, falling back to the
 * system default when the key is absent.
 *
 * @return {@code true} if job retriggering is enabled
 */
private boolean isRetriggeringEnabled() {
  return PropertiesUtils.getPropAsBoolean(jobProps, ConfigurationKeys.JOB_RETRIGGERING_ENABLED,
      ConfigurationKeys.DEFAULT_JOB_RETRIGGERING_ENABLED);
}
/**
 * Creates a complete property set by overlaying user-customized properties on a template.
 *
 * <p>The template's required-attributes marker and the user's template-path pointer are
 * internal bookkeeping keys and are stripped before merging. The inputs are not mutated.
 *
 * @param template base template properties
 * @param userCustomized user-supplied overrides
 * @return the combined properties
 */
public static Properties mergeTemplateWithUserCustomizedFile(Properties template, Properties userCustomized) {
  Properties cleanedTemplate = new Properties();
  cleanedTemplate.putAll(template);
  // remove() is a no-op for absent keys, so no containsKey guard is needed.
  cleanedTemplate.remove(ConfigurationKeys.REQUIRED_ATRRIBUTES_LIST);

  Properties cleanedUserCustomized = new Properties();
  cleanedUserCustomized.putAll(userCustomized);
  cleanedUserCustomized.remove(ConfigurationKeys.JOB_TEMPLATE_PATH);

  return PropertiesUtils.combineProperties(cleanedTemplate, cleanedUserCustomized);
}
}
@Test public void testExtractPropertiesWithPrefix() { Properties properties = new Properties(); properties.setProperty("k1.kk1", "v1"); properties.setProperty("k1.kk2", "v2"); properties.setProperty("k2.kk", "v3"); // First prefix Properties extractedPropertiesK1 = PropertiesUtils.extractPropertiesWithPrefix(properties, Optional.of("k1")); Assert.assertEquals(extractedPropertiesK1.getProperty("k1.kk1"), "v1"); Assert.assertEquals(extractedPropertiesK1.getProperty("k1.kk2"), "v2"); Assert.assertTrue(!extractedPropertiesK1.containsKey("k2.kk")); // Second prefix Properties extractedPropertiesK2 = PropertiesUtils.extractPropertiesWithPrefix(properties, Optional.of("k2")); Assert.assertTrue(!extractedPropertiesK2.containsKey("k1.kk1")); Assert.assertTrue(!extractedPropertiesK2.containsKey("k1.kk2")); Assert.assertEquals(extractedPropertiesK2.getProperty("k2.kk"), "v3"); // Missing prefix Properties extractedPropertiesK3 = PropertiesUtils.extractPropertiesWithPrefix(properties, Optional.of("k3")); Assert.assertTrue(!extractedPropertiesK3.containsKey("k1.kk1")); Assert.assertTrue(!extractedPropertiesK3.containsKey("k1.kk1")); Assert.assertTrue(!extractedPropertiesK3.containsKey("k2.kk")); } }
/**
 * Constructs a {@link Deserializer}, using the value of {@link #KAFKA_DESERIALIZER_TYPE}.
 *
 * <p>If {@code deserializerType} is present, its predefined deserializer class is instantiated;
 * otherwise the class named by the {@link #KAFKA_DESERIALIZER_TYPE} property is loaded
 * reflectively. The resulting deserializer is configured (as a value deserializer) with the
 * given properties.
 *
 * @param props configuration properties, also passed to {@code Deserializer.configure}
 * @param deserializerType optional predefined deserializer type
 * @return a configured {@link Deserializer}
 * @throws ReflectiveOperationException if the deserializer class cannot be found or instantiated
 * @throws IllegalArgumentException if neither a predefined type nor the
 *         {@link #KAFKA_DESERIALIZER_TYPE} property is supplied
 */
private static Deserializer<?> getDeserializer(Properties props, Optional<Deserializers> deserializerType)
    throws ReflectiveOperationException {
  Deserializer<?> deserializer;
  if (deserializerType.isPresent()) {
    deserializer = ConstructorUtils.invokeConstructor(deserializerType.get().getDeserializerClass());
  } else {
    String deserializerClassName = props.getProperty(KAFKA_DESERIALIZER_TYPE);
    // Fail with a descriptive message instead of the opaque NPE Class.forName(null) would throw.
    if (deserializerClassName == null) {
      throw new IllegalArgumentException(
          "Missing required property " + KAFKA_DESERIALIZER_TYPE + " naming the deserializer class");
    }
    deserializer = Deserializer.class
        .cast(ConstructorUtils.invokeConstructor(Class.forName(deserializerClassName)));
  }
  deserializer.configure(PropertiesUtils.propsToStringKeyMap(props), false);
  return deserializer;
}
/**
 * Serializes a {@link FlowId} into a properties string holding the flow name and group.
 *
 * @param id flow identifier to serialize
 * @return the serialized properties string
 * @throws IOException if serialization fails
 */
public static String serializeFlowId(FlowId id) throws IOException {
  Properties flowIdProps = new Properties();
  flowIdProps.setProperty(FLOWCONFIG_ID_NAME, id.getFlowName());
  flowIdProps.setProperty(FLOWCONFIG_ID_GROUP, id.getFlowGroup());
  return PropertiesUtils.serialize(flowIdProps);
}
/**
 * Reconstructs a {@link FlowId} from a properties string produced by {@code serializeFlowId}.
 *
 * @param serialized serialized properties string
 * @return the deserialized flow identifier
 * @throws IOException if deserialization fails
 */
public static FlowId deserializeFlowId(String serialized) throws IOException {
  Properties flowIdProps = PropertiesUtils.deserialize(serialized);
  return new FlowId()
      .setFlowName(flowIdProps.getProperty(FLOWCONFIG_ID_NAME))
      .setFlowGroup(flowIdProps.getProperty(FLOWCONFIG_ID_GROUP));
}
/**
 * Whether this job should be launched through the distributed job launcher.
 * Job-level properties take precedence over system properties when both define the key
 * (the later putAll overwrites earlier entries).
 *
 * @return {@code true} if distributed job launching is enabled
 */
private boolean isDistributeJobEnabled() {
  Properties merged = new Properties();
  merged.putAll(sysProps);
  merged.putAll(jobProps); // job-level settings win over system-level ones
  String defaultValue =
      Boolean.toString(GobblinClusterConfigurationKeys.DEFAULT_DISTRIBUTED_JOB_LAUNCHER_ENABLED);
  return PropertiesUtils.getPropAsBoolean(merged,
      GobblinClusterConfigurationKeys.DISTRIBUTED_JOB_LAUNCHER_ENABLED, defaultValue);
}
/**
 * Creates a {@link SchedulerDaemon} from default properties merged with custom overrides.
 * NOTE(review): precedence between the two sets is decided by
 * {@code PropertiesUtils.combineProperties} — presumably custom wins; confirm there.
 *
 * @param defaultProperties base configuration
 * @param customProperties  properties layered on top of the defaults
 * @throws Exception if the delegate constructor fails to initialize
 */
private SchedulerDaemon(Properties defaultProperties, Properties customProperties) throws Exception {
  this(PropertiesUtils.combineProperties(defaultProperties, customProperties));
}
/**
 * Creates a {@link SchedulerService} from raw configuration properties.
 * Reads whether the service should wait for job completion on shutdown, and forwards
 * all Quartz-specific settings (keys prefixed with {@code "org.quartz."}) to the delegate
 * constructor.
 *
 * @param props configuration properties
 */
public SchedulerService(Properties props) {
  this(Boolean.parseBoolean(
      props.getProperty(ConfigurationKeys.SCHEDULER_WAIT_FOR_JOB_COMPLETION_KEY,
          ConfigurationKeys.DEFAULT_SCHEDULER_WAIT_FOR_JOB_COMPLETION)),
      Optional.of(PropertiesUtils.extractPropertiesWithPrefix(props, Optional.of("org.quartz."))));
}