/**
 * Stores a UID under the given key so it can later be used by the UI
 * when querying. When this object wraps a real entity, the pair is
 * forwarded to it; otherwise it is recorded in the local info map.
 *
 * @param uidKey key under which the UID is stored.
 * @param uId the UID value to associate with the key.
 */
public void setUID(String uidKey, String uId) {
  if (real != null) {
    real.addInfo(uidKey, uId);
  } else {
    info.put(uidKey, uId);
  }
}
/**
 * Adds every entry of the supplied map to this entity's info. When this
 * object wraps a real entity, the entries are forwarded to it; otherwise
 * they are merged into the local info map.
 *
 * @param entityInfos info key-value pairs to add.
 */
public void addInfo(Map<String, Object> entityInfos) {
  if (real != null) {
    real.addInfo(entityInfos);
  } else {
    this.info.putAll(entityInfos);
  }
}
/**
 * Adds a single key-value pair to this entity's info. When this object
 * wraps a real entity, the pair is forwarded to it; otherwise it is
 * stored in the local info map.
 *
 * @param key info key.
 * @param value info value.
 */
public void addInfo(String key, Object value) {
  if (real != null) {
    real.addInfo(key, value);
  } else {
    info.put(key, value);
  }
}
/**
 * Helper method for reading key-value pairs for either info or config.
 *
 * @param <T> Describes the type of column prefix.
 * @param entity entity to fill.
 * @param result result from HBase.
 * @param prefix column prefix.
 * @param isConfig if true, means we are reading configs, otherwise info.
 * @throws IOException if any problem is encountered while reading result.
 */
protected <T extends BaseTable<T>> void readKeyValuePairs(
    TimelineEntity entity, Result result,
    ColumnPrefix<T> prefix, boolean isConfig) throws IOException {
  // info and configuration are of type Map<String, Object or String>
  Map<String, Object> columns =
      ColumnRWHelper.readResults(result, prefix, stringKeyConverter);
  if (isConfig) {
    // Configs are stored as strings on the entity, so stringify each value.
    for (Map.Entry<String, Object> column : columns.entrySet()) {
      entity.addConfig(column.getKey(), column.getValue().toString());
    }
  } else {
    // Info values keep their deserialized Object form.
    entity.addInfo(columns);
  }
}
// Closes the enclosing class; its opening brace is outside this view.
}
// Builds a timeline entity keyed by the application id and registers it
// in the entities map with the submitting user and a RUNNING state.
// NOTE(review): despite the method name, the entity type used here is
// YARN_APPLICATION (the sibling createAppEntities builds a YARN_CONTAINER
// entity) — the two method names look swapped; confirm against callers.
private void createContainerEntities() {
  TimelineEntity timelineEntity =
      generateEntity(TimelineEntityType.YARN_APPLICATION.toString(),
          appId.toString());
  // Attach the submitting user for UI/info queries.
  timelineEntity.addInfo(ApplicationMetricsConstants.USER_ENTITY_INFO,
      user);
  // Record the application state as RUNNING.
  timelineEntity
      .addInfo(ApplicationMetricsConstants.STATE_EVENT_INFO, "RUNNING");
  entities.put(appId.toString(), timelineEntity);
}
/**
 * Publishes a timeline (v2) event for a container that failed to start.
 * Builds a DS_CONTAINER entity carrying the submitter's user name and a
 * single event with the failure diagnostics, then pushes it
 * asynchronously as the app-submitter UGI.
 *
 * @param containerId id of the container whose start failed.
 * @param diagnostics failure diagnostics attached to the event.
 */
private void publishContainerStartFailedEventOnTimelineServiceV2(
    final ContainerId containerId, String diagnostics) {
  final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity
      entity =
      new org.apache.hadoop.yarn.api.records.timelineservice.
          TimelineEntity();
  entity.setId(containerId.toString());
  entity.setType(DSEntity.DS_CONTAINER.toString());
  entity.addInfo("user", appSubmitterUgi.getShortUserName());
  org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent event =
      new org.apache.hadoop.yarn.api.records.timelineservice
          .TimelineEvent();
  event.setTimestamp(System.currentTimeMillis());
  // NOTE(review): the event id is DS_CONTAINER_END even though this is a
  // start-failure — presumably the END event marks the container's
  // terminal state; confirm this is intended rather than a copy-paste.
  event.setId(DSEvent.DS_CONTAINER_END.toString());
  event.addInfo(DIAGNOSTICS, diagnostics);
  entity.addEvent(event);
  try {
    // Publish as the submitting user so the write is authorized.
    appSubmitterUgi.doAs((PrivilegedExceptionAction<Object>) () -> {
      timelineV2Client.putEntitiesAsync(entity);
      return null;
    });
  } catch (Exception e) {
    // doAs wraps checked exceptions in UndeclaredThrowableException;
    // unwrap so the real cause is logged.
    LOG.error("Container start failed event could not be published for {}",
        containerId,
        e instanceof UndeclaredThrowableException ? e.getCause() : e);
  }
}
// Builds a timeline entity keyed by the container id and registers it in
// the entities map with the allocated host HTTP address.
// NOTE(review): despite the method name, the entity type used here is
// YARN_CONTAINER (the sibling createContainerEntities builds a
// YARN_APPLICATION entity) — the two method names look swapped; confirm
// against callers.
private void createAppEntities() {
  TimelineEntity timelineEntity =
      generateEntity(TimelineEntityType.YARN_CONTAINER.toString(),
          cId.toString());
  // Record where the container's node HTTP endpoint lives.
  timelineEntity
      .addInfo(ContainerMetricsConstants.ALLOCATED_HOST_HTTP_ADDRESS_INFO,
          nodeHttpAddress);
  entities.put(cId.toString(), timelineEntity);
}
// Fragment from an unseen enclosing method: stamps the entity's creation
// time, records the submitting user, and begins building a timeline event.
entity.setCreatedTime(ts);
entity.addInfo("user", appSubmitterUgi.getShortUserName());
org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent event =
    new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent();
/**
 * Builds a timeline entity for a task from a job-history event.
 * The entity is based on the generic base entity, given the task id,
 * linked to its parent job, and (for TASK_STARTED events only) annotated
 * with the task type.
 *
 * @param event history event the entity describes.
 * @param timestamp event timestamp.
 * @param taskId id assigned to the entity.
 * @param entityType timeline entity type.
 * @param relatedJobEntity entity type of the parent job relation.
 * @param jobId id of the parent job.
 * @param setCreatedTime whether the base entity gets a created time.
 * @param taskIdPrefix id prefix used for entity ordering.
 * @return the populated timeline entity.
 */
private org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity
    createTaskEntity(HistoryEvent event, long timestamp, String taskId,
    String entityType, String relatedJobEntity, JobId jobId,
    boolean setCreatedTime, long taskIdPrefix) {
  org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity entity =
      createBaseEntity(event, timestamp, entityType, setCreatedTime);
  entity.setId(taskId);
  entity.setIdPrefix(taskIdPrefix);
  entity.addIsRelatedToEntity(relatedJobEntity, jobId.toString());
  // Only the start event carries the task type.
  boolean isTaskStarted = EventType.TASK_STARTED == event.getEventType();
  if (isTaskStarted) {
    TaskStartedEvent started = (TaskStartedEvent) event;
    entity.addInfo("TASK_TYPE", started.getTaskType().toString());
  }
  return entity;
}
// Fragment from an unseen enclosing method: types the entity as a
// distributed-shell container, records the submitting user, and begins
// building a timeline event.
entity.setType(DSEntity.DS_CONTAINER.toString());
entity.addInfo("user", appSubmitterUgi.getShortUserName());
org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent event =
    new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent();
// Fragment from an unseen enclosing method: types the entity as a
// distributed-shell container, stamps its creation time, and records the
// submitting user.
entity.setType(DSEntity.DS_CONTAINER.toString());
entity.setCreatedTime(startTime);
entity.addInfo("user", appSubmitterUgi.getShortUserName());
// Test fragment: populates an entity with info values of several types
// (String, List, Boolean) and verifies the Boolean survives as-is.
entity.setType("test type 1");
entity.setId("test id 1");
entity.addInfo("test info key 1", "test info value 1");
entity.addInfo("test info key 2",
    Arrays.asList("test info value 2", "test info value 3"));
entity.addInfo("test info key 3", true);
// Boolean info values must not be stringified on storage.
Assert.assertTrue(
    entity.getInfo().get("test info key 3") instanceof Boolean);
// Test fragment: builds info/config maps for three entities.
info1.put("info1", "val1");
info1.put("info2", "val5");
entity11.addInfo(info1);
TimelineEvent event = new TimelineEvent();
event.setId("event_1");
Map<String, Object> info2 = new HashMap<String, Object>();
// BUG FIX: the original wrote into info1 here ("info1.put(...)"), a dead
// mutation since entity11 had already copied info1 — leaving info2 empty
// when added to entity2. Populate info2 as clearly intended.
info2.put("info2", 4);
entity2.addInfo(info2);
Map<String, String> configs2 = new HashMap<String, String>();
configs2.put("config_1", "129");
info3.put("info2", 3.5);
info3.put("info4", 20);
entity3.addInfo(info3);
Map<String, String> configs3 = new HashMap<String, String>();
configs3.put("config_1", "123");