/**
 * Test-only convenience constructor: delegates to the full constructor with a
 * throwaway {@link ServerMetrics} backed by a fresh {@link MetricsRegistry},
 * so unit tests need not wire up real metrics.
 */
@VisibleForTesting
public MmapMemoryManager(String dirPathName, String segmentName) {
  this(dirPathName, segmentName, new ServerMetrics(new MetricsRegistry()));
}
/**
 * Test-only convenience constructor: delegates to the full constructor with a
 * throwaway {@link ServerMetrics} (fresh registry) so tests can skip metrics setup.
 */
@VisibleForTesting
public DirectMemoryManager(final String segmentName) {
  this(segmentName, new ServerMetrics(new MetricsRegistry()));
}
/**
 * Builds a mock completion manager around a fake Helix manager configured with the
 * requested leadership/connection state; controller metrics go to a throwaway registry.
 * The leadership flag is also retained locally for assertions.
 */
protected MockSegmentCompletionManager(PinotLLCRealtimeSegmentManager segmentManager, boolean isLeader,
    boolean isConnected) {
  super(createMockHelixManager(isLeader, isConnected), segmentManager,
      new ControllerMetrics(new MetricsRegistry()));
  _isLeader = isLeader;
}
/**
 * Mock segment manager for tests: passes nulls for the Helix collaborators that the
 * exercised code paths do not touch, the shared test cluster name and controller conf,
 * and a throwaway {@link ControllerMetrics}.
 */
protected MockPinotLLCRealtimeSegmentManager() {
  super(null, clusterName, null, null, null, CONTROLLER_CONF,
      new ControllerMetrics(new MetricsRegistry()));
}
/**
 * Wires up the controller's long-lived collaborators from the supplied configuration:
 * the admin REST application, the Helix resource manager, the metrics stack, and a
 * cached thread pool used by the REST API for multi-get fan-out.
 */
public ControllerStarter(ControllerConf conf) {
  _config = conf;
  _adminApp =
      new ControllerAdminApiApplication(_config.getQueryConsoleWebappPath(), _config.getQueryConsoleUseHttps());
  // NOTE: do not use _helixResourceManager before {@link PinotHelixResourceManager#start()}
  // has run; that happens inside {@link ControllerStarter#start()}.
  _helixResourceManager = new PinotHelixResourceManager(_config);
  _metricsRegistry = new MetricsRegistry();
  _controllerMetrics = new ControllerMetrics(_metricsRegistry);
  _executorService =
      Executors.newCachedThreadPool(new ThreadFactoryBuilder().setNameFormat("restapi-multiget-thread-%d").build());
}
/**
 * Fires the scatter-gather request asynchronously and hands back the composite future
 * without waiting for responses.
 *
 * NOTE(review): a brand-new {@link MetricsRegistry} and {@link BrokerMetrics} are
 * allocated on every call, so any metrics recorded here are immediately discarded —
 * consider sharing a single instance if these numbers should be observable.
 */
private CompositeFuture<byte[]> asyncSendRequestAndGetResponse(SimpleScatterGatherRequest request,
    final ScatterGatherStats scatterGatherStats)
    throws InterruptedException {
  final BrokerMetrics throwawayMetrics = new BrokerMetrics(new MetricsRegistry());
  return _scatterGather.scatterGather(request, scatterGatherStats, throwawayMetrics);
}
/**
 * Class-level fixture setup: mocks the Helix/table collaborators, creates a real
 * {@link ControllerMetrics} over a throwaway registry, stubs a replication factor of 2,
 * and makes sure the working directory exists.
 */
@BeforeClass
public void setUp() {
  // Mocked collaborators.
  _tableSizeReader = mock(TableSizeReader.class);
  _tableConfig = mock(TableConfig.class);
  _quotaConfig = mock(QuotaConfig.class);
  _validationConfig = mock(SegmentsValidationAndRetentionConfig.class);
  _pinotHelixResourceManager = mock(PinotHelixResourceManager.class);
  // Real metrics object backed by a disposable registry.
  _controllerMetrics = new ControllerMetrics(new MetricsRegistry());
  // Table config reports the mocked validation config with replication = 2.
  when(_tableConfig.getValidationConfig()).thenReturn(_validationConfig);
  when(_validationConfig.getReplicationNumber()).thenReturn(2);
  TEST_DIR.mkdirs();
}
/**
 * Class-level fixture setup: throwaway server metrics, a deep-stubbed Netty channel
 * context whose remote address resolves to localhost:60000, a mocked scheduler, a real
 * query executor, a max-accumulator for the latest query time, and an unbounded
 * resource manager with default properties.
 */
@BeforeClass
public void setUp() {
  serverMetrics = new ServerMetrics(new MetricsRegistry());
  channelHandlerContext = mock(ChannelHandlerContext.class, RETURNS_DEEP_STUBS);
  when(channelHandlerContext.channel().remoteAddress())
      .thenAnswer((Answer<InetSocketAddress>) invocationOnMock -> new InetSocketAddress("localhost", 60000));
  queryScheduler = mock(QueryScheduler.class);
  queryExecutor = new ServerQueryExecutorV1Impl();
  latestQueryTime = new LongAccumulator(Long::max, 0);
  resourceManager = new UnboundedResourceManager(new PropertiesConfiguration());
}
/**
 * Builds a mocked {@link RealtimeTableDataManager} for tests: registers a lease
 * extender for a fixed server instance id, and stubs the stats history to report a
 * cardinality of 200 and an average column size of 32 for every column.
 */
private RealtimeTableDataManager createTableDataManager() {
  final String instanceId = "server-1";
  SegmentBuildTimeLeaseExtender.create(instanceId, new ServerMetrics(new MetricsRegistry()));

  RealtimeSegmentStatsHistory mockStatsHistory = mock(RealtimeSegmentStatsHistory.class);
  when(mockStatsHistory.getEstimatedCardinality(any(String.class))).thenReturn(200);
  when(mockStatsHistory.getEstimatedAvgColSize(any(String.class))).thenReturn(32);

  RealtimeTableDataManager mockManager = mock(RealtimeTableDataManager.class);
  when(mockManager.getServerInstance()).thenReturn(instanceId);
  when(mockManager.getStatsHistory()).thenReturn(mockStatsHistory);
  return mockManager;
}
/**
 * Class-level fixture setup: mocks the resource manager / Helix manager pair, builds
 * the relocator under test with default conf and throwaway metrics, and pre-generates
 * 20 server and consuming-server instance names.
 */
@BeforeClass
public void setup() {
  _mockHelixManager = mock(HelixManager.class);
  PinotHelixResourceManager resourceManagerMock = mock(PinotHelixResourceManager.class);
  when(resourceManagerMock.getHelixZkManager()).thenReturn(_mockHelixManager);

  ControllerConf conf = new ControllerConf();
  ControllerMetrics metrics = new ControllerMetrics(new MetricsRegistry());
  _realtimeSegmentRelocator = new TestRealtimeSegmentRelocator(resourceManagerMock, conf, metrics);

  final int maxInstances = 20;
  serverNames = new String[maxInstances];
  consumingServerNames = new String[maxInstances];
  for (int idx = 0; idx < maxInstances; idx++) {
    serverNames[idx] = "Server_" + idx;
    consumingServerNames[idx] = "ConsumingServer_" + idx;
  }
}
/**
 * Helper to send a request to the server and return the first response as a string.
 *
 * @param request scatter-gather request to dispatch
 * @param scatterGatherStats stats object updated by the scatter-gather call
 * @return the first response decoded as UTF-8, or {@code null} if no response arrived
 * @throws InterruptedException if interrupted while waiting for the response
 * @throws ExecutionException if the underlying request failed
 */
private String sendRequestAndGetResponse(SimpleScatterGatherRequest request,
    final ScatterGatherStats scatterGatherStats)
    throws InterruptedException, ExecutionException {
  // NOTE(review): metrics recorded into this per-call registry are discarded.
  BrokerMetrics brokerMetrics = new BrokerMetrics(new MetricsRegistry());
  CompositeFuture<byte[]> future = _scatterGather.scatterGather(request, scatterGatherStats, brokerMetrics);
  byte[] bytes = future.getOne();
  if (bytes == null) {
    return null;
  }
  // Decode with an explicit charset: the bare new String(byte[]) constructor uses the
  // platform default charset, which makes the test's behavior JVM/OS dependent.
  return new String(bytes, java.nio.charset.StandardCharsets.UTF_8);
}
/**
 * Creates and starts an {@link OfflineTableDataManager} wired to a mocked config and
 * property store, then reflectively grabs its private segment map so tests can inspect
 * the segments it registers.
 *
 * @return the started table data manager
 */
private TableDataManager makeTestableManager()
    throws Exception {
  TableDataManagerConfig mockConfig = mock(TableDataManagerConfig.class);
  when(mockConfig.getTableName()).thenReturn(TABLE_NAME);
  when(mockConfig.getDataDir()).thenReturn(_tmpDir.getAbsolutePath());

  TableDataManager tableDataManager = new OfflineTableDataManager();
  tableDataManager
      .init(mockConfig, "dummyInstance", mock(ZkHelixPropertyStore.class), new ServerMetrics(new MetricsRegistry()));
  tableDataManager.start();

  // Expose the private segment map for white-box assertions.
  Field segsMapField = BaseTableDataManager.class.getDeclaredField("_segmentDataManagerMap");
  segsMapField.setAccessible(true);
  _internalSegMap = (Map<String, ImmutableSegmentDataManager>) segsMapField.get(tableDataManager);
  return tableDataManager;
}
/**
 * Assembles the broker server: stores the injected collaborators, loads the access
 * control factory, initializes and registers the metrics stack, then builds the
 * request handler and the admin REST application.
 *
 * Statement order matters here (metrics must be initialized and registered before
 * {@link BrokerMetrics} is constructed and meters are created), so it is preserved.
 */
public BrokerServerBuilder(Configuration config, RoutingTable routingTable, TimeBoundaryService timeBoundaryService,
    LiveInstancesChangeListenerImpl liveInstanceChangeListener, TableQueryQuotaManager tableQueryQuotaManager) {
  _state.set(State.INIT);
  // Injected collaborators and configuration.
  _config = config;
  _delayedShutdownTimeMs = config.getLong(DELAY_SHUTDOWN_TIME_MS_CONFIG, DEFAULT_DELAY_SHUTDOWN_TIME_MS);
  _routingTable = routingTable;
  _timeBoundaryService = timeBoundaryService;
  _liveInstanceChangeListener = liveInstanceChangeListener;
  _tableQueryQuotaManager = tableQueryQuotaManager;
  _accessControlFactory = AccessControlFactory.loadFactory(_config.subset(ACCESS_CONTROL_PREFIX));
  // Metrics: initialize helpers, register the registry, then create broker metrics
  // (table-level metrics are enabled unless explicitly turned off in config).
  _metricsRegistry = new MetricsRegistry();
  MetricsHelper.initializeMetrics(config.subset(METRICS_CONFIG_PREFIX));
  MetricsHelper.registerMetricsRegistry(_metricsRegistry);
  _brokerMetrics = new BrokerMetrics(_metricsRegistry, !_config.getBoolean(TABLE_LEVEL_METRICS_CONFIG, true));
  _brokerMetrics.initializeGlobalMeters();
  // Request handling and admin endpoints.
  _brokerRequestHandler = buildRequestHandler();
  _brokerAdminApplication = new BrokerAdminApiApplication(this);
}
/**
 * Bootstraps the server metrics stack: initializes the metrics helpers from server
 * config, registers a fresh registry, then creates {@link ServerMetrics} (table-level
 * metrics on/off per config) and its global meters.
 */
private void initMetrics() {
  MetricsHelper.initializeMetrics(_serverConf.getMetricsConfig());
  MetricsRegistry registry = new MetricsRegistry();
  MetricsHelper.registerMetricsRegistry(registry);
  _serverMetrics = new ServerMetrics(registry, !_serverConf.emitTableLevelMetrics());
  _serverMetrics.initializeGlobalMeters();
}
@Test public void testMetricsHelperRegistration() { listenerOneOkay = false; listenerTwoOkay = false; Map<String, String> configKeys = new HashMap<String, String>(); configKeys.put("pinot.broker.metrics.metricsRegistryRegistrationListeners", ListenerOne.class.getName() + "," + ListenerTwo.class.getName()); Configuration configuration = new MapConfiguration(configKeys); MetricsRegistry registry = new MetricsRegistry(); // Initialize the MetricsHelper and create a new timer MetricsHelper.initializeMetrics(configuration.subset("pinot.broker.metrics")); MetricsHelper.registerMetricsRegistry(registry); MetricsHelper.newTimer(registry, new MetricName(MetricsHelperTest.class, "dummy"), TimeUnit.MILLISECONDS, TimeUnit.MILLISECONDS); // Check that the two listeners fired assertTrue(listenerOneOkay); assertTrue(listenerTwoOkay); } }
/**
 * When a table has no ideal state or external view, the status checker must publish
 * {@code Long.MIN_VALUE} for its error/replica gauges (i.e. "unknown") rather than a
 * misleading real value.
 */
@Test
public void missingIdealTest()
    throws Exception {
  final String tableName = "myTable_REALTIME";
  List<String> allTableNames = new ArrayList<>();
  allTableNames.add(tableName);

  // Resource manager knows the table but has neither ideal state nor external view.
  helixResourceManager = mock(PinotHelixResourceManager.class);
  when(helixResourceManager.getAllTables()).thenReturn(allTableNames);
  when(helixResourceManager.getTableIdealState(tableName)).thenReturn(null);
  when(helixResourceManager.getTableExternalView(tableName)).thenReturn(null);

  // Checker runs every 300s and waits 300s after a push.
  config = mock(ControllerConf.class);
  when(config.getStatusCheckerFrequencyInSeconds()).thenReturn(300);
  when(config.getStatusCheckerWaitForPushTimeInSeconds()).thenReturn(300);

  metricsRegistry = new MetricsRegistry();
  controllerMetrics = new ControllerMetrics(metricsRegistry);
  segmentStatusChecker = new SegmentStatusChecker(helixResourceManager, config, controllerMetrics);
  segmentStatusChecker.init();
  segmentStatusChecker.run();

  Assert.assertEquals(controllerMetrics.getValueOfTableGauge(tableName, ControllerGauge.SEGMENTS_IN_ERROR_STATE),
      Long.MIN_VALUE);
  Assert.assertEquals(controllerMetrics.getValueOfTableGauge(tableName, ControllerGauge.NUMBER_OF_REPLICAS),
      Long.MIN_VALUE);
  Assert.assertEquals(controllerMetrics.getValueOfTableGauge(tableName, ControllerGauge.PERCENT_OF_REPLICAS),
      Long.MIN_VALUE);
}
/**
 * Builds the scatter-gather test harness: Netty client metrics over a throwaway
 * registry, a pooled Netty connection resource manager, a keyed connection pool, the
 * scatter-gather implementation itself, and starts the async reader threads.
 */
private void setup() {
  MetricsRegistry registry = new MetricsRegistry();
  _timedExecutor = new ScheduledThreadPoolExecutor(1);
  _service = new ThreadPoolExecutor(10, 10, 10, TimeUnit.DAYS, new LinkedBlockingDeque<Runnable>());
  _eventLoopGroup = new NioEventLoopGroup(10);
  _timer = new HashedWheelTimer();

  NettyClientMetrics clientMetrics = new NettyClientMetrics(registry, "client_");
  PooledNettyClientResourceManager rm = new PooledNettyClientResourceManager(_eventLoopGroup, _timer, clientMetrics);
  // MoreExecutors.sameThreadExecutor() is deprecated (removed in Guava 21);
  // newDirectExecutorService() is its documented drop-in replacement.
  _pool = new KeyedPoolImpl<PooledNettyClientResourceManager.PooledClientConnection>(1, _maxActiveConnections,
      300000, 10, rm, _timedExecutor, MoreExecutors.newDirectExecutorService(), registry);
  rm.setPool(_pool);
  _scatterGather = new ScatterGatherImpl(_pool, _service);
  for (AsyncReader r : _readerThreads) {
    r.start();
  }
}
@Test public void testRealtimeLLCCleanup() throws Exception { final int initialNumSegments = 8; final long now = System.currentTimeMillis(); final int replicaCount = 1; TableConfig tableConfig = createRealtimeTableConfig1(replicaCount); List<String> removedSegments = new ArrayList<>(); PinotHelixResourceManager pinotHelixResourceManager = setupSegmentMetadata(tableConfig, now, initialNumSegments, removedSegments); setupPinotHelixResourceManager(tableConfig, removedSegments, pinotHelixResourceManager); ControllerConf conf = new ControllerConf(); ControllerMetrics controllerMetrics = new ControllerMetrics(new MetricsRegistry()); conf.setRetentionControllerFrequencyInSeconds(0); conf.setDeletedSegmentsRetentionInDays(0); RetentionManager retentionManager = new RetentionManager(pinotHelixResourceManager, conf, controllerMetrics); retentionManager.init(); retentionManager.run(); SegmentDeletionManager deletionManager = pinotHelixResourceManager.getSegmentDeletionManager(); // Verify that the removeAgedDeletedSegments() method in deletion manager is actually called. verify(deletionManager, times(1)).removeAgedDeletedSegments(anyInt()); // Verify that the deleteSegments method is actually called. verify(pinotHelixResourceManager, times(1)).deleteSegments(anyString(), anyList()); }
/**
 * Assembles a {@link FakeLLRealtimeSegmentDataManager} from freshly created ZK
 * metadata, table config, a mocked table data manager, the test schema, and throwaway
 * server metrics.
 */
private FakeLLRealtimeSegmentDataManager createFakeSegmentManager()
    throws Exception {
  LLCRealtimeSegmentZKMetadata zkMetadata = createZkMetadata();
  TableConfig config = createTableConfig();
  InstanceZKMetadata instanceMetadata = new InstanceZKMetadata();
  RealtimeTableDataManager dataManager = createTableDataManager();
  Schema testSchema = Schema.fromString(makeSchema());
  ServerMetrics metrics = new ServerMetrics(new MetricsRegistry());
  return new FakeLLRealtimeSegmentDataManager(zkMetadata, config, instanceMetadata, dataManager, _segmentDir,
      testSchema, metrics);
}
private void setupRealtimeTable() throws IOException { // Set up the realtime table. Map<String, String> streamConfigs = new HashMap<>(); streamConfigs.put("streamType", "kafka"); streamConfigs.put("stream.kafka.consumer.type", "highLevel"); streamConfigs.put("stream.kafka.topic.name", "kafkaTopic"); streamConfigs .put("stream.kafka.decoder.class.name", "org.apache.pinot.core.realtime.impl.kafka.KafkaAvroMessageDecoder"); streamConfigs.put("stream.kafka.hlc.zk.connect.string", "localhost:1111/zkConnect"); streamConfigs.put("stream.kafka.decoder.prop.schema.registry.rest.url", "http://localhost:2222/schemaRegistry"); TableConfig realtimeTimeConfig = new TableConfig.Builder(CommonConstants.Helix.TableType.REALTIME).setTableName(RAW_DINING_TABLE_NAME) .setTimeColumnName("timeColumn").setTimeType("DAYS"). setStreamConfigs(streamConfigs).build(); Schema schema = new Schema(); schema.setSchemaName(RAW_DINING_TABLE_NAME); _pinotResourceManager.addOrUpdateSchema(schema); // Fake an PinotLLCRealtimeSegmentManager instance: required for a realtime table creation. PinotLLCRealtimeSegmentManager .create(_pinotResourceManager, new ControllerConf(), new ControllerMetrics(new MetricsRegistry())); _pinotResourceManager.addTable(realtimeTimeConfig); _helixBrokerStarter.getHelixExternalViewBasedRouting() .markDataResourceOnline(realtimeTimeConfig, null, new ArrayList<InstanceConfig>()); }