public void start() {
    Duration[] defaultLatchIntervals = {Duration.apply(1, TimeUnit.MINUTES)};
    @SuppressWarnings("deprecation")
    AdminServiceFactory adminServiceFactory = new AdminServiceFactory(
        this.mPort,
        20,
        List$.MODULE$.<StatsFactory>empty(),
        Option.<String>empty(),
        List$.MODULE$.<Regex>empty(),
        Map$.MODULE$.<String, CustomHttpHandler>empty(),
        JavaConversions.asScalaBuffer(Arrays.asList(defaultLatchIntervals)).toList()
    );
    RuntimeEnvironment runtimeEnvironment = new RuntimeEnvironment(this);
    adminServiceFactory.apply(runtimeEnvironment);
    try {
        Properties properties = new Properties();
        // getResourceAsStream + try-with-resources: the previous
        // getResource(...).openStream() threw an NPE when the resource was
        // absent and leaked the stream (it was never closed).
        try (java.io.InputStream in =
                 this.getClass().getResourceAsStream("build.properties")) {
            if (in == null) {
                // Best-effort metadata; keep startup going without it.
                LOG.warn("build.properties not found on classpath");
                return;
            }
            properties.load(in);
        }
        String buildRevision = properties.getProperty("build_revision", "unknown");
        // Reuse buildRevision instead of re-reading the property.
        LOG.info("build.properties build_revision: {}", buildRevision);
        StatsUtil.setLabel("secor.build_revision", buildRevision);
    } catch (Throwable t) {
        // Deliberately best-effort: a bad build.properties must not abort startup.
        LOG.error("Failed to load properties from build.properties", t);
    }
}
}
/**
 * Get the persisted framework ID.
 *
 * @return the current ID or empty if none is yet persisted.
 * @throws Exception on ZK failures, interruptions.
 */
@Override
public Option<Protos.FrameworkID> getFrameworkID() throws Exception {
    synchronized (startStopLock) {
        verifyIsRunning();
        byte[] value = frameworkIdInZooKeeper.getValue();
        // An empty node means no framework ID has been persisted yet.
        if (value.length == 0) {
            return Option.empty();
        }
        String id = new String(value, ConfigConstants.DEFAULT_CHARSET);
        return Option.apply(Protos.FrameworkID.newBuilder().setValue(id).build());
    }
}
// Maps broker id -> rack id for the brokers currently registered in ZooKeeper.
Map<Integer, String> rackByBroker = new HashMap<>();
// RackAwareMode.Enforced$ — presumably requires rack metadata for every broker;
// TODO confirm against Kafka AdminUtils semantics.
// NOTE(review): the loop body continues past this chunk.
for (BrokerMetadata bm : JavaConversions.seqAsJavaList(
        AdminUtils.getBrokerMetadatas(zkUtils, RackAwareMode.Enforced$.MODULE$, Option.empty()))) {
NoOpMetricRegistry.INSTANCE, "localhost", Option.<String>empty(), false, TaskManager.class);
/**
 * Retrieves a configuration value as a <code>String</code>.
 *
 * @param key configuration key (relative to configuration root key)
 * @return a configuration value or <code>null</code>
 */
public String getString(String key) {
    // No restriction on accepted values for this key.
    scala.Option<scala.collection.immutable.Set<java.lang.String>> anyValue =
        scala.Option.empty();
    return Scala.orNull(conf.getString(key, anyValue));
}
/**
 * Retrieves a configuration value as a <code>String</code>.
 *
 * @param key configuration key (relative to configuration root key)
 * @param defaultString default value if configuration key doesn't exist
 * @return a configuration value or the defaultString
 */
public String getString(String key, String defaultString) {
    // No restriction on accepted values for this key.
    scala.Option<scala.collection.immutable.Set<java.lang.String>> anyValue =
        scala.Option.empty();
    return Scala.orElse(conf.getString(key, anyValue), defaultString);
}
/**
 * Builds a mock {@link ExternalCatalogTable} for a Kafka+JSON topic: a single
 * INT column "foo", reading the given topic from the given broker.
 */
private static ExternalCatalogTable mockExternalCatalogTable(String topic, String brokerAddress) {
    TableSchema tableSchema =
        new TableSchema(new String[] {"foo"}, new TypeInformation[] {INT_TYPE_INFO});
    ConnectorDescriptor connectorDescriptor = new ConnectorDescriptor("kafka+json", 1, false) {
        @Override
        public void addConnectorProperties(DescriptorProperties properties) {
            properties.putTableSchema(TOPIC_SCHEMA_KEY, tableSchema);
            properties.putString(TOPIC_NAME_KEY, topic);
            properties.putString(
                KAFKA_CONFIG_PREFIX + "." + ConsumerConfig.GROUP_ID_CONFIG, "foo");
            properties.putString(
                KAFKA_CONFIG_PREFIX + "." + ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
                brokerAddress);
            properties.putString(
                KAFKA_CONFIG_PREFIX + "." + ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,
                "earliest");
        }
    };
    return new ExternalCatalogTable(
        connectorDescriptor, Option.empty(), Option.empty(), Option.empty(), Option.empty());
}
}
/**
 * Converts this table into an {@link ExternalCatalogTable}, exposing the schema
 * and the serialized row data through connector properties.
 */
ExternalCatalogTable toExternalCatalogTable() {
    TableSchema derivedSchema = new TableSchema(schema.getFieldNames(), schema.getFieldTypes());
    ConnectorDescriptor connector =
        new ConnectorDescriptor(CONNECTOR_TYPE, CONNECTOR_VERSION, false) {
            @Override
            public void addConnectorProperties(DescriptorProperties properties) {
                properties.putTableSchema(TABLE_SCHEMA_CONNECTOR_PROPERTY, derivedSchema);
                properties.putString(TABLE_DATA_CONNECTOR_PROPERTY, serializeRows());
            }
        };
    return new ExternalCatalogTable(
        connector, Option.empty(), Option.empty(), Option.empty(), Option.empty());
}
/**
 * Starts an embedded ZooKeeper and creates one {@link KafkaServer} per broker id,
 * with broker state rooted in a fresh temp directory.
 */
private MiniKafkaCluster(List<String> brokerIds) throws IOException, InterruptedException {
    this.zkServer = new EmbeddedZooKeeper();
    this.tempDir = Files.createTempDirectory(
        Paths.get(System.getProperty("java.io.tmpdir")), "mini-kafka-cluster");
    this.kafkaServer = new ArrayList<>();
    for (String brokerId : brokerIds) {
        KafkaConfig brokerConfig = new KafkaConfig(createBrokerConfig(brokerId));
        this.kafkaServer.add(new KafkaServer(brokerConfig, SystemTime$.MODULE$, Option.empty()));
    }
}
/** * The context for loading an application. * * @param environment the application environment * @param initialSettings the initial settings. These settings are merged with the settings from the loaded * configuration files, and together form the initialConfiguration provided by the context. It * is intended for use in dev mode, to allow the build system to pass additional configuration * into the application. */ public Context(Environment environment, Map<String,Object> initialSettings) { this.underlying = new play.api.ApplicationLoader.Context( environment.underlying(), scala.Option.empty(), new play.core.DefaultWebCommands(), play.api.Configuration.load(environment.underlying(), play.libs.Scala.asScala(initialSettings))); }
/** * The context for loading an application. * * @param environment the application environment * @param initialSettings the initial settings. These settings are merged with the settings from the loaded * configuration files, and together form the initialConfiguration provided by the context. It * is intended for use in dev mode, to allow the build system to pass additional configuration * into the application. */ public Context(Environment environment, Map<String, Object> initialSettings) { this.underlying = new play.api.ApplicationLoader.Context( environment.asScala(), play.api.Configuration.load(environment.asScala(), play.libs.Scala.asScala(initialSettings)), new DefaultApplicationLifecycle(), scala.Option.empty()); }
/** * The context for loading an application. * * @param environment the application environment * @param initialSettings the initial settings. These settings are merged with the settings from the loaded * configuration files, and together form the initialConfiguration provided by the context. It * is intended for use in dev mode, to allow the build system to pass additional configuration * into the application. */ public Context(Environment environment, Map<String, Object> initialSettings) { this.underlying = new play.api.ApplicationLoader.Context( environment.asScala(), play.api.Configuration.load(environment.asScala(), play.libs.Scala.asScala(initialSettings)), new DefaultApplicationLifecycle(), scala.Option.empty()); }
/** * The context for loading an application. * * @param environment the application environment * @param initialSettings the initial settings. These settings are merged with the settings from the loaded * configuration files, and together form the initialConfiguration provided by the context. It * is intended for use in dev mode, to allow the build system to pass additional configuration * into the application. */ public Context(Environment environment, Map<String, Object> initialSettings) { this.underlying = new play.api.ApplicationLoader.Context( environment.asScala(), play.api.Configuration.load(environment.asScala(), play.libs.Scala.asScala(initialSettings)), new DefaultApplicationLifecycle(), scala.Option.empty()); }
/**
 * Converts a {@link java.util.Optional} to a Scala Option.
 *
 * @param <T> the element type
 * @param optional the Java optional to convert
 * @return {@code Some(value)} when present, {@code None} otherwise
 */
public static <T> Option<T> asOption(java.util.Optional<T> optional) {
    return optional.isPresent() ? Option.<T>apply(optional.get()) : Option.<T>empty();
}
/**
 * Test Clean-By-Versions using prepped versions of bulk-insert/upsert API
 */
@Test
public void testBulkInsertPreppedAndCleanByCommits() throws Exception {
    testInsertAndCleanByCommits(
        (writeClient, records, instantTime) ->
            writeClient.bulkInsertPreppedRecords(records, instantTime, Option.empty()),
        HoodieWriteClient::upsertPreppedRecords,
        true);
}
/**
 * Fetches broker metadata (rack-aware, enforced mode) from ZooKeeper.
 *
 * <p>Opens a fresh ZooKeeper connection per call and closes it before
 * returning — the previous version leaked the connection on every call.
 *
 * @return the brokers currently registered in ZooKeeper
 */
private List<BrokerMetadata> getBrokerMetadatas() {
    ZkClient zkClient =
        new ZkClient(getZookeeperConnectString(), 1000, 1000, ZKStringSerializer$.MODULE$);
    ZkUtils zkUtils = new ZkUtils(zkClient, new ZkConnection(getZookeeperConnectString()), false);
    try {
        return JavaConversions.seqAsJavaList(
            AdminUtils.getBrokerMetadatas(zkUtils, Enforced$.MODULE$, Option.empty()));
    } finally {
        // ZkUtils.close() also closes the underlying ZkClient.
        zkUtils.close();
    }
}
/**
 * Test tagLocation API after bulkInsertPrepped()
 */
@Test
public void testTagLocationAfterBulkInsertPrepped() throws Exception {
    testTagLocation(
        getConfigBuilder().withBulkInsertParallelism(1).build(),
        (client, records, instantTime) ->
            client.bulkInsertPreppedRecords(records, instantTime, Option.empty()),
        HoodieWriteClient::upsertPreppedRecords,
        true);
}