// CacheLoader.load: derives a child MetricsContext keyed by the given tag map.
// NOTE(review): the trailing "} });" closes an enclosing anonymous class / cache
// builder that is not visible in this chunk — left untouched.
@Override public MetricsContext load(Map<String, String> key) throws Exception { return metricsContext.childContext(key); } });
// CacheLoader.load: derives a child MetricsContext keyed by the given tag map.
// NOTE(review): exact duplicate of the previous fragment; the trailing "} });"
// closes an enclosing anonymous class not visible in this chunk.
@Override public MetricsContext load(Map<String, String> key) throws Exception { return metricsContext.childContext(key); } });
/**
 * Returns a child context of the wrapped {@link MetricsContext} carrying the
 * single extra tag {@code tagName=tagValue}. Pure delegation — no local state.
 */
@Override
public MetricsContext childContext(String tagName, String tagValue) {
  return metricsContext.childContext(tagName, tagValue);
}
/**
 * Returns a child context of the wrapped {@link MetricsContext} carrying all of
 * the given tags. Pure delegation — no local state.
 */
@Override
public MetricsContext childContext(Map<String, String> tags) {
  return metricsContext.childContext(tags);
}
/**
 * Delegates to the underlying {@link MetricsContext} to build a child context
 * tagged with the supplied map. (Duplicate of the preceding delegator.)
 */
@Override
public MetricsContext childContext(Map<String, String> tags) {
  return metricsContext.childContext(tags);
}
/**
 * Wraps the given program- and workflow-level contexts, scoping both to the
 * "user" metrics scope so emitted metrics are attributed to user code.
 *
 * @param programMetricsContext  context for program-level metrics
 * @param workflowMetricsContext context for workflow-level metrics
 */
public WorkflowMetrics(MetricsContext programMetricsContext, MetricsContext workflowMetricsContext) {
  this.programMetricsContext = programMetricsContext.childContext(Constants.Metrics.Tag.SCOPE, "user");
  this.workflowMetricsContext = workflowMetricsContext.childContext(Constants.Metrics.Tag.SCOPE, "user");
}
/**
 * Builds a child context carrying one extra {@code tagName=tagValue} tag by
 * delegating to the wrapped {@link MetricsContext}. (Duplicate delegator.)
 */
@Override
public MetricsContext childContext(String tagName, String tagValue) {
  return metricsContext.childContext(tagName, tagValue);
}
/**
 * Creates the workflow metrics wrapper. Both supplied contexts are narrowed to
 * the "user" scope tag before being stored. (Duplicate of the preceding ctor.)
 *
 * @param programMetricsContext  context for program-level metrics
 * @param workflowMetricsContext context for workflow-level metrics
 */
public WorkflowMetrics(MetricsContext programMetricsContext, MetricsContext workflowMetricsContext) {
  this.programMetricsContext = programMetricsContext.childContext(Constants.Metrics.Tag.SCOPE, "user");
  this.workflowMetricsContext = workflowMetricsContext.childContext(Constants.Metrics.Tag.SCOPE, "user");
}
/**
 * Creates a metrics wrapper whose every emission is tagged with the "user"
 * scope, attributing the metrics to user code rather than the system.
 *
 * @param metricsContext the context to narrow to the user scope
 */
public ProgramUserMetrics(MetricsContext metricsContext) {
  this.metricsContext = metricsContext.childContext(Constants.Metrics.Tag.SCOPE, "user");
}
/**
 * Stores a child of the given context scoped to "user" metrics.
 * (Duplicate of the preceding constructor.)
 *
 * @param metricsContext the context to narrow to the user scope
 */
public ProgramUserMetrics(MetricsContext metricsContext) {
  this.metricsContext = metricsContext.childContext(Constants.Metrics.Tag.SCOPE, "user");
}
// CacheLoader.load: builds a per-queue child MetricsContext, tagging the
// program metrics with FLOWLET_QUEUE = the queue name used as the cache key.
// NOTE(review): the trailing "} });" closes an enclosing anonymous class /
// cache builder that is not visible in this chunk — left untouched.
@Override public MetricsContext load(String key) throws Exception { return getProgramMetrics().childContext(Constants.Metrics.Tag.FLOWLET_QUEUE, key); } });
/**
 * Creates a writer for MapReduce task metrics. Separate child contexts are
 * derived for the mapper and reducer task types so their metrics are tagged
 * independently.
 *
 * @param jobConf the Hadoop job configuration handle
 * @param context the MapReduce program context supplying the base metrics
 */
public MapReduceMetricsWriter(Job jobConf, BasicMapReduceContext context) {
  this.jobConf = jobConf;
  this.mapperMetrics = context.getProgramMetrics()
      .childContext(Constants.Metrics.Tag.MR_TASK_TYPE, MapReduceMetrics.TaskType.Mapper.getId());
  this.reducerMetrics = context.getProgramMetrics()
      .childContext(Constants.Metrics.Tag.MR_TASK_TYPE, MapReduceMetrics.TaskType.Reducer.getId());
}
/**
 * Builds mapper/reducer-tagged metrics contexts from the program context.
 * (Duplicate of the preceding constructor.)
 *
 * @param jobConf the Hadoop job configuration handle
 * @param context the MapReduce program context supplying the base metrics
 */
public MapReduceMetricsWriter(Job jobConf, BasicMapReduceContext context) {
  this.jobConf = jobConf;
  this.mapperMetrics = context.getProgramMetrics()
      .childContext(Constants.Metrics.Tag.MR_TASK_TYPE, MapReduceMetrics.TaskType.Mapper.getId());
  this.reducerMetrics = context.getProgramMetrics()
      .childContext(Constants.Metrics.Tag.MR_TASK_TYPE, MapReduceMetrics.TaskType.Reducer.getId());
}
// CacheLoader.load: builds a queue-metrics child context tagged with the
// producer (key.getFirst()), the queue name (key.getSecond()), and this
// flowlet as the consumer.
// NOTE(review): the trailing "} });" closes an enclosing anonymous class /
// cache builder that is not visible in this chunk — left untouched.
@Override public MetricsContext load(ImmutablePair<String, String> key) throws Exception { return getProgramMetrics() .childContext(ImmutableMap.of( Constants.Metrics.Tag.PRODUCER, key.getFirst(), Constants.Metrics.Tag.FLOWLET_QUEUE, key.getSecond(), Constants.Metrics.Tag.CONSUMER, BasicFlowletContext.this.flowletId.getFlowlet())); } });
/**
 * Builds one {@link SparkHandlerDelegatorContext} per handler declared in the
 * service specification. Each handler class is loaded through the program
 * classloader and given its own metrics context tagged with the handler's
 * simple class name.
 *
 * @return the delegator contexts, one per declared handler
 * @throws Exception if a handler class cannot be loaded
 */
@Override
protected List<SparkHandlerDelegatorContext> createDelegatorContexts() throws Exception {
  List<SparkHandlerDelegatorContext> delegators = new ArrayList<>();
  InstantiatorFactory factory = new InstantiatorFactory(false);
  for (SparkHttpServiceHandlerSpecification handlerSpec : context.getSpecification().getHandlers()) {
    Class<?> clazz = getProgram().getClassLoader().loadClass(handlerSpec.getClassName());
    @SuppressWarnings("unchecked")
    TypeToken<SparkHttpServiceHandler> handlerType = TypeToken.of((Class<SparkHttpServiceHandler>) clazz);
    // Per-handler metrics are tagged with the handler's simple class name.
    MetricsContext handlerMetrics = runtimeContext.getProgramMetrics().childContext(
        Constants.Metrics.Tag.HANDLER, clazz.getSimpleName());
    delegators.add(new SparkHandlerDelegatorContext(handlerType, factory, handlerSpec,
                                                    runtimeContext.getProgramMetrics(), handlerMetrics));
  }
  return delegators;
}
/**
 * Creates a delegator context for every Spark HTTP service handler in the
 * specification. (Duplicate of the preceding method.)
 *
 * @return the delegator contexts, one per declared handler
 * @throws Exception if a handler class cannot be loaded
 */
@Override
protected List<SparkHandlerDelegatorContext> createDelegatorContexts() throws Exception {
  List<SparkHandlerDelegatorContext> delegators = new ArrayList<>();
  InstantiatorFactory factory = new InstantiatorFactory(false);
  for (SparkHttpServiceHandlerSpecification handlerSpec : context.getSpecification().getHandlers()) {
    Class<?> clazz = getProgram().getClassLoader().loadClass(handlerSpec.getClassName());
    @SuppressWarnings("unchecked")
    TypeToken<SparkHttpServiceHandler> handlerType = TypeToken.of((Class<SparkHttpServiceHandler>) clazz);
    MetricsContext handlerMetrics = runtimeContext.getProgramMetrics().childContext(
        Constants.Metrics.Tag.HANDLER, clazz.getSimpleName());
    delegators.add(new SparkHandlerDelegatorContext(handlerType, factory, handlerSpec,
                                                    runtimeContext.getProgramMetrics(), handlerMetrics));
  }
  return delegators;
}
/**
 * Instantiates delegator contexts for all declared Spark HTTP handlers.
 * (Second duplicate of the same method in this chunk.)
 *
 * @return the delegator contexts, one per declared handler
 * @throws Exception if a handler class cannot be loaded
 */
@Override
protected List<SparkHandlerDelegatorContext> createDelegatorContexts() throws Exception {
  List<SparkHandlerDelegatorContext> delegators = new ArrayList<>();
  InstantiatorFactory factory = new InstantiatorFactory(false);
  for (SparkHttpServiceHandlerSpecification handlerSpec : context.getSpecification().getHandlers()) {
    Class<?> clazz = getProgram().getClassLoader().loadClass(handlerSpec.getClassName());
    @SuppressWarnings("unchecked")
    TypeToken<SparkHttpServiceHandler> handlerType = TypeToken.of((Class<SparkHttpServiceHandler>) clazz);
    MetricsContext handlerMetrics = runtimeContext.getProgramMetrics().childContext(
        Constants.Metrics.Tag.HANDLER, clazz.getSimpleName());
    delegators.add(new SparkHandlerDelegatorContext(handlerType, factory, handlerSpec,
                                                    runtimeContext.getProgramMetrics(), handlerMetrics));
  }
  return delegators;
}
@Override protected List<HandlerDelegatorContext> createDelegatorContexts() throws Exception { // Constructs all handler delegator. It is for bridging ServiceHttpHandler and HttpHandler (in netty-http). List<HandlerDelegatorContext> delegatorContexts = new ArrayList<>(); InstantiatorFactory instantiatorFactory = new InstantiatorFactory(false); for (HttpServiceHandlerSpecification handlerSpec : serviceSpecification.getHandlers().values()) { Class<?> handlerClass = getProgram().getClassLoader().loadClass(handlerSpec.getClassName()); @SuppressWarnings("unchecked") TypeToken<HttpServiceHandler> type = TypeToken.of((Class<HttpServiceHandler>) handlerClass); MetricsContext metrics = context.getProgramMetrics().childContext( BasicHttpServiceContext.createMetricsTags(handlerSpec, getInstanceId())); delegatorContexts.add(new HandlerDelegatorContext(type, instantiatorFactory, handlerSpec, contextFactory, metrics)); } return delegatorContexts; }
/**
 * Creates a new dataset instance described by the cache key.
 *
 * <p>If the instantiated dataset supports metering and a metrics context is
 * available, a dataset-tagged metrics collector is attached. Lineage is
 * recorded only when requested by the caller.
 *
 * @param key           identifies namespace, name, arguments and access type
 * @param recordLineage whether to record lineage for this access
 * @return the instantiated (possibly metered) dataset
 */
private Dataset createDatasetInstance(DatasetCacheKey key, boolean recordLineage) {
  DatasetId id = new DatasetId(key.getNamespace(), key.getName());
  Dataset instance = instantiator.getDataset(id, key.getArguments(), key.getAccessType());
  if (instance instanceof MeteredDataset && metricsContext != null) {
    // Attach a collector tagged with the dataset name so usage is metered per dataset.
    ((MeteredDataset) instance).setMetricsCollector(
        metricsContext.childContext(Constants.Metrics.Tag.DATASET, key.getName()));
  }
  if (recordLineage) {
    instantiator.writeLineage(id, key.getAccessType());
  }
  return instance;
}
/**
 * Instantiates the dataset named by the cache key, wiring up per-dataset
 * metering when possible and optionally recording lineage.
 * (Duplicate of the preceding method.)
 *
 * @param key           identifies namespace, name, arguments and access type
 * @param recordLineage whether to record lineage for this access
 * @return the instantiated (possibly metered) dataset
 */
private Dataset createDatasetInstance(DatasetCacheKey key, boolean recordLineage) {
  DatasetId id = new DatasetId(key.getNamespace(), key.getName());
  Dataset instance = instantiator.getDataset(id, key.getArguments(), key.getAccessType());
  if (instance instanceof MeteredDataset && metricsContext != null) {
    ((MeteredDataset) instance).setMetricsCollector(
        metricsContext.childContext(Constants.Metrics.Tag.DATASET, key.getName()));
  }
  if (recordLineage) {
    instantiator.writeLineage(id, key.getAccessType());
  }
  return instance;
}