/**
 * Returns the configured URL of the Druid coordinator.
 *
 * @return the Druid coordinator URL, or {@code null} when the property is not set
 */
public static String getDruidCoordUrl() {
    final String coordinatorUrl = SYSTEM_CONFIG.getStringProperty(DRUID_COORD_URL_KEY, null);
    return coordinatorUrl;
}
/**
 * Builds a package-scoped configuration variable name.
 *
 * @param suffix The variable name of the configuration variable without the package prefix
 *
 * @return the package-prefixed variable name
 */
default String getPackageVariableName(String suffix) {
    String packagePrefix = getStringProperty("package_name");
    return String.join("__", packagePrefix, suffix);
}
/**
 * Produces the fully scoped name for a configuration variable in this package.
 *
 * @param suffix The variable name of the configuration variable without the package prefix
 *
 * @return the configured package name joined to the suffix with a double underscore
 */
default String getPackageVariableName(String suffix) {
    StringBuilder scopedName = new StringBuilder();
    scopedName.append(getStringProperty("package_name")).append("__").append(suffix);
    return scopedName.toString();
}
/**
 * Get the output formatter for the system, lazily built from configuration on first use.
 * <p>
 * The lazy initialization is not synchronized; a concurrent first call may build the
 * formatter twice, but both builds produce an equivalent formatter, so the race is benign.
 *
 * @return the output formatter
 */
public static DateTimeFormatter getOutputFormatter() {
    if (DATETIME_OUTPUT_FORMATTER != null) {
        return DATETIME_OUTPUT_FORMATTER;
    }
    String pattern = SYSTEM_CONFIG.getStringProperty(OUTPUT_DATETIME_FORMAT, "yyyy-MM-dd' 'HH:mm:ss.SSS");
    DATETIME_OUTPUT_FORMATTER = DateTimeFormat.forPattern(pattern);
    return DATETIME_OUTPUT_FORMATTER;
}
/**
 * Fetches the configured druid query priority, defaulting to 1.
 *
 * @return druid priority
 *
 * @throws NumberFormatException if the configured value is not a valid integer
 */
public static Integer getDruidPriority() {
    // Integer.valueOf parses exactly like parseInt and boxes the result.
    return Integer.valueOf(SYSTEM_CONFIG.getStringProperty(DRUID_PRIORITY_KEY, "1"));
}
/**
 * Initializes the connection to the sql backend and prepares
 * the {@link SqlBackedClient} for converting queries.
 * <p>
 * On failure the error is logged and {@code sqlConverter} is left unset
 * (best-effort initialization, matching the original behavior).
 *
 * @param mapper The mapper for all JSON processing.
 */
private void initializeSqlBackend(ObjectMapper mapper) {
    final String url = SYSTEM_CONFIG.getStringProperty(DATABASE_URL);
    final String driverName = SYSTEM_CONFIG.getStringProperty(DATABASE_DRIVER);
    final String userName = SYSTEM_CONFIG.getStringProperty(DATABASE_USERNAME);
    final String password = SYSTEM_CONFIG.getStringProperty(DATABASE_PASSWORD);
    try {
        sqlConverter = new DefaultSqlBackedClient(url, driverName, userName, password, mapper);
    } catch (SQLException e) {
        LOG.warn("Failed to initialize Sql backend", e);
    }
}
/** * Get the path for the lucene index files for a provider. * * @param providerName Name of the provider * * @return the path to the lucene index files for this provider. */ private static String getProviderPath(String providerName) { // Path eg: /home/y/var/bard_webservice/dimension1/lucene_indexes/ return String.format( "%s/dimensionCache/%s/lucene_indexes/", SYSTEM_CONFIG.getStringProperty(LUCENE_INDEX_PATH), providerName ).replaceAll("/+", "/"); // replaces one or more slashes with one slash: } }
/**
 * Constructor.
 *
 * @param versionKey The property name from which to get the version from the SystemConfig
 * @param gitShaKey The property name from which to get the git sha from the SystemConfig
 */
public VersionHealthCheck(String versionKey, String gitShaKey) {
    usedVersionKey = versionKey;
    // Both properties are optional; a missing one is logged at the same level
    // (the original logged the version at error and the git sha at warn — inconsistently).
    version = readOptionalProperty(versionKey);
    gitSha = readOptionalProperty(gitShaKey);
}

/**
 * Reads a configuration property, returning null (after logging a warning) when it is absent.
 *
 * @param key The property name to look up in the SystemConfig
 *
 * @return the configured value, or null if the property is not set
 */
private static String readOptionalProperty(String key) {
    try {
        return SYSTEM_CONFIG.getStringProperty(key);
    } catch (SystemConfigException ignored) {
        // Absence is tolerated: the health check reports "unknown" rather than failing startup.
        LOG.warn("{} not found in configuration", key);
        return null;
    }
}
/**
 * Fetches and validates the druid broker URL.
 *
 * @return druid URL
 */
public static String getDruidUrl() {
    final String druidBrokerUrl = SYSTEM_CONFIG.getStringProperty(DRUID_BROKER_URL_KEY, null);
    validateUrl(druidBrokerUrl);
    return druidBrokerUrl;
}
/**
 * Build the Supplier for Druid data request headers.
 * <p>
 * If {@code DRUID_HEADER_SUPPLIER_CLASS} is configured, that class is loaded reflectively
 * and instantiated via its no-argument constructor; otherwise an empty-map supplier is used.
 *
 * @return The Druid data request header Supplier.
 *
 * @throws IllegalStateException if the configured supplier class cannot be loaded or instantiated
 */
protected Supplier<Map<String, String>> buildDruidWebServiceHeaderSupplier() {
    String customSupplierClassString = SYSTEM_CONFIG.getStringProperty(DRUID_HEADER_SUPPLIER_CLASS, null);
    if (customSupplierClassString == null || customSupplierClassString.isEmpty()) {
        return HashMap::new;
    }
    try {
        Class<?> supplierClass = Class.forName(customSupplierClassString);
        Constructor<?> constructor = supplierClass.getConstructor();
        // The cast cannot be statically checked; a misconfigured class surfaces as a
        // ClassCastException at first use of the supplier.
        @SuppressWarnings("unchecked")
        Supplier<Map<String, String>> supplier = (Supplier<Map<String, String>>) constructor.newInstance();
        return supplier;
    } catch (Exception e) {
        // Pass the exception as the final argument (not as a {} parameter) so SLF4J
        // records the full stack trace instead of just its toString().
        LOG.error("Unable to load the Druid query header supplier, className: {}", customSupplierClassString, e);
        throw new IllegalStateException(e);
    }
}
/**
 * Returns a clock for generating instants for timestamps.
 *
 * @return The clock with which to generate instants for timestamps, by default uses the system clock with the
 * system timezone
 */
protected Clock getClock() {
    String timezoneId = SYSTEM_CONFIG.getStringProperty(
            SYSTEM_CONFIG.getPackageVariableName(SYSTEM_CONFIG_TIMEZONE_KEY),
            "UTC"
    );
    return Clock.system(ZoneId.of(timezoneId));
}
/** * Get the DimensionBackend for tests. * * @return the configured dimension backend */ public static DimensionBackend getBackend() { // Read every time, since some tests set dimension_backend String dimensionBackend = SYSTEM_CONFIG.getStringProperty(DIMENSION_BACKEND_KEY, "memory"); if ("redis".equalsIgnoreCase(dimensionBackend)) { return REDIS; } return MEMORY; } }
/** * Get the DimensionBackend for tests. * * @return the configured dimension backend */ public static DimensionBackend getBackend() { // Read every time, since some tests set dimension_backend String dimensionBackend = SYSTEM_CONFIG.getStringProperty(DIMENSION_BACKEND_KEY, "memory"); if ("redis".equalsIgnoreCase(dimensionBackend)) { return REDIS; } return MEMORY; } }
/**
 * Get an instance of LogFormatter, lazily loading the configured implementation class.
 *
 * @return an instance of LogFormatter
 *
 * @throws IllegalStateException if the configured implementation cannot be loaded or instantiated
 */
public static LogFormatter getInstance() {
    if (logFormatter == null) {
        String logFormatterImplementation = SystemConfigProvider.getInstance().getStringProperty(
                SYSTEM_CONFIG.getPackageVariableName(LOG_FORMATTER_IMPLEMENTATION_SETTING_NAME),
                DEFAULT_LOG_FORMATTER_IMPL
        );
        try {
            // getDeclaredConstructor().newInstance() replaces the deprecated Class.newInstance(),
            // which silently rethrows checked constructor exceptions.
            logFormatter = (LogFormatter) Class.forName(logFormatterImplementation)
                    .getDeclaredConstructor()
                    .newInstance();
        } catch (Exception exception) {
            // Include the class name and pass the exception last so the stack trace is logged.
            LOG.error("Exception while loading Log formatter: {}", logFormatterImplementation, exception);
            throw new IllegalStateException(exception);
        }
    }
    return logFormatter;
}
}
/**
 * Generates ZonedSchema object from given JsonNode.
 *
 * @param schemaNode JsonNode which contains all the columns, timezone and granularity
 *
 * @return ResultSetSchema object generated from the JsonNode
 */
private ResultSetSchema getResultSetSchema(JsonNode schemaNode) {
    // Resolve the schema's timezone, falling back to the package-configured zone (default UTC)
    // when the serialized timezone is absent or unusable — exact fallback semantics live in
    // generateTimezone.
    DateTimeZone timezone = generateTimezone(
            schemaNode.get(SCHEMA_TIMEZONE).asText(),
            DateTimeZone.forID(
                    SYSTEM_CONFIG.getStringProperty(SYSTEM_CONFIG.getPackageVariableName("timezone"), "UTC")
            )
    );

    //Recreate ResultSetSchema
    // Dimension columns (names resolved back to Dimension objects) come first, then metric
    // columns carrying their serialized value type; LinkedHashSet preserves that order.
    LinkedHashSet<Column> columns = Stream.concat(
            Streams.stream(schemaNode.get(SCHEMA_DIM_COLUMNS))
                    .map(JsonNode::asText)
                    .map(this::resolveDimensionName)
                    .map(DimensionColumn::new),
            // Supplier form because fields() returns a one-shot Iterator.
            Streams.stream(() -> schemaNode.get(SCHEMA_METRIC_COLUMNS_TYPE).fields())
                    .map(entry -> new MetricColumnWithValueType(entry.getKey(), entry.getValue().asText()))
    ).collect(Collectors.toCollection(LinkedHashSet::new));

    return new ResultSetSchema(generateGranularity(schemaNode.get(SCHEMA_GRANULARITY).asText(), timezone), columns);
}
/**
 * Returns whether the feature flag has been configured.
 *
 * @return true if the feature flag has been configured.
 */
default boolean isSet() {
    SystemConfig config = SystemConfigProvider.getInstance();
    try {
        String configured = config.getStringProperty(config.getPackageVariableName(getName()));
        return configured != null;
    } catch (SystemConfigException ignored) {
        // A lookup failure means the flag was never configured.
        return false;
    }
}
// When no explicit asyncAfter was supplied, fall back to the package-scoped
// "default_asyncAfter" configuration value; generateAsyncAfter converts the chosen
// string into the stored representation. NOTE(review): assumes the configuration
// property is always present when asyncAfter is null — confirm, since
// getStringProperty without a default may throw for a missing key.
this.asyncAfter = generateAsyncAfter(
        asyncAfter == null
                ? SYSTEM_CONFIG.getStringProperty(SYSTEM_CONFIG.getPackageVariableName("default_asyncAfter"))
                : asyncAfter
);
/**
 * Constructor.
 *
 * @param druidClient The client to query Druid coordinator
 * @param dimensionDictionary A {@link com.yahoo.bard.webservice.data.dimension.DimensionDictionary} that is used
 * to obtain a list of lookups in Fili.
 */
public RegisteredLookupMetadataLoadTask(DruidWebService druidClient, DimensionDictionary dimensionDictionary) {
    // Schedule the task: start immediately by default (0ms delay) and repeat every
    // minute unless the period is overridden in configuration.
    super(
            RegisteredLookupMetadataLoadTask.class.getSimpleName(),
            SYSTEM_CONFIG.getLongProperty(INITIAL_LOOKUP_CHECKING_DELAY, 0),
            SYSTEM_CONFIG.getLongProperty(LOOKUP_NORMAL_CHECKING_PERIOD_KEY, TimeUnit.MINUTES.toMillis(1))
    );
    this.druidClient = druidClient;
    this.dimensionDictionary = dimensionDictionary;
    this.successCallback = buildLookupSuccessCallback();
    this.failureCallback = getFailureCallback();
    this.errorCallback = getErrorCallback();
    // Lookups observed as not-yet-loaded on the coordinator; starts empty.
    this.pendingLookups = new HashSet<>();
    // Comma-separated tier list from config; "__default" is the Druid default lookup tier.
    this.lookupTiers = getTiers(SYSTEM_CONFIG.getStringProperty(TIERS_KEY, "__default"));
}
@Override
public boolean isOn() {
    if (on == null) {
        // TODO: Remove this if conditional after cache V1 & V2 configuration flags are removed
        // Legacy path: when either old cache flag is explicitly configured, derive the answer
        // from those flags on EVERY call. NOTE(review): `on` is deliberately(?) never cached in
        // this branch — each return below leaves `on` null; confirm this is intentional.
        if (BardFeatureFlag.DRUID_CACHE.isSet() || BardFeatureFlag.DRUID_CACHE_V2.isSet()) {
            // no cache
            // "NoCache" is on exactly when the V1 cache flag is off.
            if (this.value.equals("NoCache")) {
                return ! BardFeatureFlag.DRUID_CACHE.isOn();
            }
            // "Ttl" requires V1 on and V2 off; "LocalSignature" requires both V1 and V2 on.
            return (this.value.equals("Ttl")
                    && !BardFeatureFlag.DRUID_CACHE_V2.isOn()
                    && BardFeatureFlag.DRUID_CACHE.isOn()
            ) || (this.value.equals("LocalSignature")
                    && BardFeatureFlag.DRUID_CACHE.isOn()
                    && BardFeatureFlag.DRUID_CACHE_V2.isOn());
        }
        // Modern path: compare this strategy's value against the configured caching strategy
        // (default "NoCache") and cache the result for subsequent calls.
        on = value.equalsIgnoreCase(SYSTEM_CONFIG.getStringProperty(
                SYSTEM_CONFIG.getPackageVariableName("query_response_caching_strategy"),
                "NoCache")
        );
    }
    return on;
}