/**
 * Creates the manager. Jedis pools are built lazily, on first request, by the
 * loading cache via {@code createConsumer}.
 *
 * @param redisConnectorConfig connector configuration, never null
 * @param nodeManager node manager (injected; not used directly in this constructor)
 */
@Inject
RedisJedisManager(
        RedisConnectorConfig redisConnectorConfig,
        NodeManager nodeManager)
{
    // Fix: message now matches the parameter/field name (was "redisConfig is null").
    this.redisConnectorConfig = requireNonNull(redisConnectorConfig, "redisConnectorConfig is null");
    this.jedisPoolCache = CacheBuilder.newBuilder().build(CacheLoader.from(this::createConsumer));
    this.jedisPoolConfig = new JedisPoolConfig();
}
/**
 * Creates a tab-completer backed by two asynchronously reloading caches:
 * table names (refreshed every {@code RELOAD_TIME_MINUTES} minutes) and
 * function names (reloaded only on explicit refresh/invalidation).
 *
 * @param queryRunner runner used to list tables and functions, never null
 */
public TableNameCompleter(QueryRunner queryRunner)
{
    // Fix: standardized the null message to the codebase-wide "x is null" form
    // (was "queryRunner session was null!").
    this.queryRunner = requireNonNull(queryRunner, "queryRunner is null");
    tableCache = CacheBuilder.newBuilder()
            .refreshAfterWrite(RELOAD_TIME_MINUTES, TimeUnit.MINUTES)
            .build(asyncReloading(CacheLoader.from(this::listTables), executor));
    functionCache = CacheBuilder.newBuilder()
            .build(asyncReloading(CacheLoader.from(this::listFunctions), executor));
}
/**
 * Creates a cache of network locations resolved through the given topology.
 */
public NetworkLocationCache(NetworkTopology networkTopology)
{
    this.networkTopology = requireNonNull(networkTopology, "networkTopology is null");

    // Successful lookups live for a day and are refreshed in the background
    // every 12 hours via the shared executor.
    this.cache = CacheBuilder.newBuilder()
            .expireAfterWrite(1, DAYS)
            .refreshAfterWrite(12, HOURS)
            .build(asyncReloading(CacheLoader.from(this::locate), executor));

    // Failed lookups are remembered separately, for a much shorter window.
    this.negativeCache = CacheBuilder.newBuilder()
            .expireAfterWrite(NEGATIVE_CACHE_DURATION.toMillis(), MILLISECONDS)
            .build();
}
/**
 * Creates expiring caches of node names and host names keyed by node id.
 * Both caches share the same expiry policy and differ only in which
 * {@code Cluster} accessor resolves the id.
 */
@Inject
public NodeInfoCache(Cluster cluster)
{
    this.nodeNameCache = CacheBuilder.newBuilder()
            .expireAfterWrite(EXPIRE_DURATION.getQuantity(), EXPIRE_DURATION.getUnit())
            .build(CacheLoader.from(cluster::nodeIdToName));
    this.hostNameCache = CacheBuilder.newBuilder()
            .expireAfterWrite(EXPIRE_DURATION.getQuantity(), EXPIRE_DURATION.getUnit())
            .build(CacheLoader.from(cluster::nodeIdToHostName));
}
/**
 * Creates the compiler with a bounded, instrumented cache of compiled cursor
 * processors keyed by (filter, projections).
 */
@Inject
public ExpressionCompiler(Metadata metadata, PageFunctionCompiler pageFunctionCompiler)
{
    requireNonNull(metadata, "metadata is null");
    this.pageFunctionCompiler = requireNonNull(pageFunctionCompiler, "pageFunctionCompiler is null");
    // At most 1000 compiled processors are retained; stats feed the MBean below.
    this.cursorProcessors = CacheBuilder.newBuilder()
            .recordStats()
            .maximumSize(1000)
            .build(CacheLoader.from(key -> compile(
                    key.getFilter(),
                    key.getProjections(),
                    new CursorProcessorCompiler(metadata),
                    CursorProcessor.class)));
    this.cacheStatsMBean = new CacheStatsMBean(cursorProcessors);
}
/**
 * Creates route-metadata storage. In the "dev" environment the cache capacity
 * is zero, so metadata is re-extracted on every lookup; otherwise entries are
 * cached without bound.
 */
public RouteMetadata(final Env env)
{
    CacheLoader<Class<?>, Map<String, Object>> loader = CacheLoader
            .from(RouteMetadata::extractMetadata);
    if (env.name().equals("dev")) {
        // maximumSize(0) effectively disables caching for fast dev iteration.
        cache = CacheBuilder.newBuilder().maximumSize(0).build(loader);
    }
    else {
        cache = CacheBuilder.newBuilder().build(loader);
    }
}
/**
 * Creates a session over the given Mongo client, copying tunables from the
 * config and setting up a refreshing cache of table schemas.
 */
public MongoSession(TypeManager typeManager, MongoClient client, MongoClientConfig config)
{
    this.typeManager = requireNonNull(typeManager, "typeManager is null");
    this.client = requireNonNull(client, "client is null");

    this.schemaCollection = config.getSchemaCollection();
    this.cursorBatchSize = config.getCursorBatchSize();
    this.implicitPrefix = config.getImplicitRowFieldPrefix();

    // Schemas expire after an hour and refresh every minute. TODO: Configure
    this.tableCache = CacheBuilder.newBuilder()
            .expireAfterWrite(1, HOURS)
            .refreshAfterWrite(1, MINUTES)
            .build(CacheLoader.from(this::loadTableSchema));
}
@Inject public KafkaSimpleConsumerManager( KafkaConnectorId connectorId, KafkaConnectorConfig kafkaConnectorConfig, NodeManager nodeManager) { this.connectorId = requireNonNull(connectorId, "connectorId is null").toString(); this.nodeManager = requireNonNull(nodeManager, "nodeManager is null"); requireNonNull(kafkaConnectorConfig, "kafkaConfig is null"); this.connectTimeoutMillis = toIntExact(kafkaConnectorConfig.getKafkaConnectTimeout().toMillis()); this.bufferSizeBytes = toIntExact(kafkaConnectorConfig.getKafkaBufferSize().toBytes()); this.consumerCache = CacheBuilder.newBuilder().build(CacheLoader.from(this::createConsumer)); }
/**
 * Creates Thrift-backed metadata. Table metadata is cached with a hard expiry
 * and refreshed asynchronously on the dedicated metadata-refresh executor.
 */
@Inject
public ThriftMetadata(
        DriftClient<PrestoThriftService> client,
        ThriftHeaderProvider thriftHeaderProvider,
        TypeManager typeManager,
        @ForMetadataRefresh Executor metadataRefreshExecutor)
{
    this.client = requireNonNull(client, "client is null");
    this.thriftHeaderProvider = requireNonNull(thriftHeaderProvider, "thriftHeaderProvider is null");
    this.typeManager = requireNonNull(typeManager, "typeManager is null");
    // Entries are dropped after EXPIRE_AFTER_WRITE, and eagerly refreshed in the
    // background once older than REFRESH_AFTER_WRITE, so readers rarely block.
    this.tableCache = CacheBuilder.newBuilder()
            .expireAfterWrite(EXPIRE_AFTER_WRITE.toMillis(), MILLISECONDS)
            .refreshAfterWrite(REFRESH_AFTER_WRITE.toMillis(), MILLISECONDS)
            .build(asyncReloading(CacheLoader.from(this::getTableMetadataInternal), metadataRefreshExecutor));
}
/**
 * Creates an LDAP password authenticator from the server configuration.
 *
 * <p>Requires an LDAP URL and a user bind-search pattern. If a group
 * authorization search pattern is configured, a user base DN must be
 * configured as well. Successful authentication results are cached for the
 * configured TTL.
 */
@Inject
public LdapAuthenticator(LdapConfig serverConfig)
{
    String ldapUrl = requireNonNull(serverConfig.getLdapUrl(), "ldapUrl is null");
    this.userBindSearchPattern = requireNonNull(serverConfig.getUserBindSearchPattern(), "userBindSearchPattern is null");
    this.groupAuthorizationSearchPattern = Optional.ofNullable(serverConfig.getGroupAuthorizationSearchPattern());
    this.userBaseDistinguishedName = Optional.ofNullable(serverConfig.getUserBaseDistinguishedName());
    // Group filtering can only be performed relative to a base DN.
    if (groupAuthorizationSearchPattern.isPresent()) {
        checkState(userBaseDistinguishedName.isPresent(), "Base distinguished name (DN) for user is null");
    }
    Map<String, String> environment = ImmutableMap.<String, String>builder()
            .put(INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory")
            .put(PROVIDER_URL, ldapUrl)
            .build();
    // Presumably validates the JNDI environment / server reachability up front
    // before the first authentication attempt — TODO confirm checkEnvironment's contract.
    checkEnvironment(environment);
    this.basicEnvironment = environment;
    // Cache authentication results so repeated logins within the TTL skip the LDAP round trip.
    this.authenticationCache = CacheBuilder.newBuilder()
            .expireAfterWrite(serverConfig.getLdapCacheTtl().toMillis(), MILLISECONDS)
            .build(CacheLoader.from(this::authenticate));
}
public void testMaximumSize_withoutWeigher()
{
    // A spec with maximumSize but no weigher must build successfully.
    CacheBuilder<Object, Object> builder = CacheBuilder.from(parse("maximumSize=9000"));
    builder.build(CacheLoader.from(Suppliers.ofInstance(null)));
}
/**
 * Creates the page-function compiler. When {@code expressionCacheSize} is
 * positive, compiled projections and filters are cached (with stats exposed
 * via MBeans); otherwise caching is disabled and the cache fields are null.
 *
 * @param metadata metadata used for function resolution, never null
 * @param expressionCacheSize maximum entries per cache; {@code <= 0} disables caching
 */
public PageFunctionCompiler(Metadata metadata, int expressionCacheSize)
{
    this.metadata = requireNonNull(metadata, "metadata is null");
    this.determinismEvaluator = new DeterminismEvaluator(metadata.getFunctionRegistry());

    // Fix: the original tested "expressionCacheSize > 0" twice with identical
    // outcomes; a single branch configures (or disables) both caches together.
    if (expressionCacheSize > 0) {
        projectionCache = CacheBuilder.newBuilder()
                .recordStats()
                .maximumSize(expressionCacheSize)
                .build(CacheLoader.from(projection -> compileProjectionInternal(projection, Optional.empty())));
        projectionCacheStats = new CacheStatsMBean(projectionCache);
        filterCache = CacheBuilder.newBuilder()
                .recordStats()
                .maximumSize(expressionCacheSize)
                .build(CacheLoader.from(filter -> compileFilterInternal(filter, Optional.empty())));
        filterCacheStats = new CacheStatsMBean(filterCache);
    }
    else {
        projectionCache = null;
        projectionCacheStats = null;
        filterCache = null;
        filterCacheStats = null;
    }
}
/**
 * Builds the static table registry from the local-file config.
 *
 * <p>Currently only the HTTP request log table is registered, and only when a
 * log location is configured. File listings per table are cached briefly so
 * repeated splits don't re-scan the filesystem.
 */
@Inject
public LocalFileTables(LocalFileConfig config)
{
    ImmutableMap.Builder<SchemaTableName, DataLocation> dataLocationBuilder = ImmutableMap.builder();
    ImmutableMap.Builder<SchemaTableName, LocalFileTableHandle> tablesBuilder = ImmutableMap.builder();
    ImmutableMap.Builder<SchemaTableName, List<ColumnMetadata>> tableColumnsBuilder = ImmutableMap.builder();

    String httpRequestLogLocation = config.getHttpRequestLogLocation();
    // The table is only registered when a log location is configured.
    if (httpRequestLogLocation != null) {
        Optional<String> pattern = Optional.empty();
        if (config.getHttpRequestLogFileNamePattern() != null) {
            pattern = Optional.of(config.getHttpRequestLogFileNamePattern());
        }

        SchemaTableName table = getSchemaTableName();
        DataLocation dataLocation = new DataLocation(httpRequestLogLocation, pattern);
        LocalFileTableHandle tableHandle = new LocalFileTableHandle(table, getTimestampColumn(), getServerAddressColumn());

        tablesBuilder.put(table, tableHandle);
        tableColumnsBuilder.put(table, HttpRequestLogTable.getColumns());
        dataLocationBuilder.put(table, dataLocation);
    }

    tables = tablesBuilder.build();
    tableColumns = tableColumnsBuilder.build();
    tableDataLocations = dataLocationBuilder.build();

    // File listings expire after 10 seconds so new log files are picked up quickly.
    // NOTE(review): the loader assumes every requested key is present in
    // tableDataLocations; an unknown key would NPE — confirm callers only pass known tables.
    cachedFiles = CacheBuilder.newBuilder()
            .expireAfterWrite(10, SECONDS)
            .build(CacheLoader.from(key -> tableDataLocations.get(key).files()));
}
public void testMaximumWeight_withoutWeigher()
{
    // maximumWeight without a weigher is an invalid configuration:
    // build() must throw IllegalStateException.
    CacheBuilder<Object, Object> builder = CacheBuilder.from(parse("maximumWeight=9000"));
    try {
        builder.build(CacheLoader.from(Suppliers.ofInstance(null)));
        fail();
    }
    catch (IllegalStateException expected) {
    }
}
public void testDisableCaching() { // Functional test: assert that CacheBuilderSpec.disableCaching() // disables caching. It's irrelevant how it does so. CacheBuilder<Object, Object> builder = CacheBuilder.from(CacheBuilderSpec.disableCaching()); Object key = new Object(); Object value = new Object(); LoadingCache<Object, Object> cache = builder.build(CacheLoader.from(Suppliers.ofInstance(value))); assertSame(value, cache.getUnchecked(key)); assertEquals(0, cache.size()); assertFalse(cache.asMap().containsKey(key)); }
/**
 * Returns a comparator ordering {@code ActualProperties} by how well they
 * suit streaming execution under the given preferred properties: local
 * optimization potential first, then partitioning requirements, then the
 * quality of the local-property match layout.
 */
@VisibleForTesting
static Comparator<ActualProperties> streamingExecutionPreference(PreferredProperties preferred)
{
    // Calculating the matches can be a bit expensive, so cache the results between comparisons
    LoadingCache<List<LocalProperty<Symbol>>, List<Optional<LocalProperty<Symbol>>>> matchCache = CacheBuilder.newBuilder()
            .build(CacheLoader.from(actualProperties -> LocalProperties.match(actualProperties, preferred.getLocalProperties())));

    return (actual1, actual2) -> {
        List<Optional<LocalProperty<Symbol>>> matchLayout1 = matchCache.getUnchecked(actual1.getLocalProperties());
        List<Optional<LocalProperty<Symbol>>> matchLayout2 = matchCache.getUnchecked(actual2.getLocalProperties());

        // Tie-break in priority order; earlier comparisons dominate later ones.
        return ComparisonChain.start()
                .compareTrueFirst(hasLocalOptimization(preferred.getLocalProperties(), matchLayout1), hasLocalOptimization(preferred.getLocalProperties(), matchLayout2))
                .compareTrueFirst(meetsPartitioningRequirements(preferred, actual1), meetsPartitioningRequirements(preferred, actual2))
                .compare(matchLayout1, matchLayout2, matchedLayoutPreference())
                .result();
    };
}
public void testMaximumSize_withWeigher()
{
    // A weigher is permitted alongside maximumSize; build() must succeed.
    CacheBuilder<Object, Object> builder = CacheBuilder.from(parse("maximumSize=9000"));
    builder.weigher(constantWeigher(42)).build(CacheLoader.from(Suppliers.ofInstance(null)));
}
/**
 * Creates the client: copies retry/timeout settings from the config, opens one
 * transport client per distinct cluster referenced by the table descriptions,
 * and sets up a refreshing cache of column metadata.
 *
 * @throws IOException if a cluster host cannot be resolved
 */
@Inject
public ElasticsearchClient(ElasticsearchTableDescriptionProvider descriptions, ElasticsearchConnectorConfig config)
        throws IOException
{
    tableDescriptions = requireNonNull(descriptions, "description is null");
    ElasticsearchConnectorConfig configuration = requireNonNull(config, "config is null");
    requestTimeout = configuration.getRequestTimeout();
    maxAttempts = configuration.getMaxRequestRetries();
    maxRetryTime = configuration.getMaxRetryTime();

    // One TransportClient per distinct cluster name; reuse if already created.
    // NOTE(review): if a later iteration throws (e.g. unknown host), clients
    // created earlier in this loop are never closed — confirm cleanup happens elsewhere.
    for (ElasticsearchTableDescription tableDescription : tableDescriptions.getAllTableDescriptions()) {
        if (!clients.containsKey(tableDescription.getClusterName())) {
            Settings settings = Settings.builder().put("cluster.name", tableDescription.getClusterName()).build();
            TransportAddress address = new TransportAddress(InetAddress.getByName(tableDescription.getHost()), tableDescription.getPort());
            TransportClient client = new PreBuiltTransportClient(settings).addTransportAddress(address);
            clients.put(tableDescription.getClusterName(), client);
        }
    }

    // Column metadata expires after 30 minutes, refreshing in the background
    // after 15; bounded to 500 entries.
    this.columnMetadataCache = CacheBuilder.newBuilder()
            .expireAfterWrite(30, MINUTES)
            .refreshAfterWrite(15, MINUTES)
            .maximumSize(500)
            .build(asyncReloading(CacheLoader.from(this::loadColumns), executor));
}
public void testMaximumWeight_withWeigher()
{
    // With a weigher present, maximumWeight is a valid configuration.
    CacheBuilder<Object, Object> builder = CacheBuilder.from(parse("maximumWeight=9000"));
    builder.weigher(constantWeigher(42)).build(CacheLoader.from(Suppliers.ofInstance(null)));
}
@BeforeMethod
public void setUp()
{
    // Single daemon scheduler shared by the HTTP client and exchange client.
    executor = newSingleThreadScheduledExecutor(daemonThreadsNamed("test-merge-operator-%s"));
    serdeFactory = new TestingPagesSerdeFactory();
    orderingCompiler = new OrderingCompiler();

    // Task buffers are materialized on demand, one per task.
    taskBuffers = CacheBuilder.newBuilder().build(CacheLoader.from(TestingTaskBuffer::new));
    httpClient = new TestingHttpClient(new TestingExchangeHttpClientHandler(taskBuffers), executor);
    exchangeClientFactory = new ExchangeClientFactory(new ExchangeClientConfig(), httpClient, executor);
}