/**
 * Sets idPrefix for an entity.
 * <p>
 * <b>Note</b>: Entities will be stored in the order of idPrefix specified.
 * If users decide to set idPrefix for an entity, they <b>MUST</b> provide
 * the same prefix for every update of this entity.
 * </p>
 * Example: <blockquote><pre>
 *   TimelineEntity entity = new TimelineEntity();
 *   entity.setIdPrefix(value);
 * </pre></blockquote>
 * Users can use {@link TimelineServiceHelper#invertLong(long)} to invert
 * the prefix if necessary.
 *
 * @param entityIdPrefix prefix for an entity.
 */
@JsonSetter("idprefix")
public void setIdPrefix(long entityIdPrefix) {
  // Delegate to the wrapped entity when present, otherwise record locally.
  if (real != null) {
    real.setIdPrefix(entityIdPrefix);
  } else {
    this.idPrefix = entityIdPrefix;
  }
}
}
/**
 * Builds a timeline entity for a task attempt and links it back to its
 * parent task entity.
 *
 * @param event history event the entity is derived from.
 * @param timestamp timestamp to stamp on the base entity.
 * @param taskAttemptId task attempt id, used as the entity id.
 * @param entityType timeline entity type for the attempt.
 * @param relatedTaskEntity entity type of the parent task.
 * @param taskId id of the parent task this attempt belongs to.
 * @param setCreatedTime whether the created time is set on the entity.
 * @param taskAttemptIdPrefix id prefix controlling storage order.
 * @return the populated task-attempt timeline entity.
 */
private org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity
    createTaskAttemptEntity(HistoryEvent event, long timestamp,
    String taskAttemptId, String entityType, String relatedTaskEntity,
    String taskId, boolean setCreatedTime, long taskAttemptIdPrefix) {
  org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity
      attemptEntity =
          createBaseEntity(event, timestamp, entityType, setCreatedTime);
  attemptEntity.setId(taskAttemptId);
  attemptEntity.setIdPrefix(taskAttemptIdPrefix);
  attemptEntity.addIsRelatedToEntity(relatedTaskEntity, taskId);
  return attemptEntity;
}
/**
 * Publishes a timeline entity describing a finished container, carrying the
 * container's diagnostics, exit status, final state and finish time, plus a
 * FINISHED event stamped with the finish time.
 *
 * @param containerStatus final status of the container.
 * @param containerFinishTime time at which the container finished.
 * @param containerStartTime time at which the container started; inverted to
 *     form the entity's id prefix so entities are stored in that order.
 */
@SuppressWarnings("unchecked")
private void publishContainerFinishedEvent(ContainerStatus containerStatus,
    long containerFinishTime, long containerStartTime) {
  ContainerId containerId = containerStatus.getContainerId();
  TimelineEntity containerEntity = createContainerEntity(containerId);

  // Attach the terminal details of the container as entity info.
  Map<String, Object> info = new HashMap<String, Object>();
  info.put(ContainerMetricsConstants.DIAGNOSTICS_INFO,
      containerStatus.getDiagnostics());
  info.put(ContainerMetricsConstants.EXIT_STATUS_INFO,
      containerStatus.getExitStatus());
  info.put(ContainerMetricsConstants.STATE_INFO,
      ContainerState.COMPLETE.toString());
  info.put(ContainerMetricsConstants.CONTAINER_FINISHED_TIME,
      containerFinishTime);
  containerEntity.setInfo(info);

  // Record the FINISHED event at the container's finish time.
  TimelineEvent finishedEvent = new TimelineEvent();
  finishedEvent.setId(ContainerMetricsConstants.FINISHED_EVENT_TYPE);
  finishedEvent.setTimestamp(containerFinishTime);
  containerEntity.addEvent(finishedEvent);

  // Inverted start time as id prefix; the prefix must be identical for
  // every update of this container entity.
  containerEntity.setIdPrefix(
      TimelineServiceHelper.invertLong(containerStartTime));

  dispatcher.getEventHandler().handle(new TimelinePublishEvent(
      containerEntity,
      containerId.getApplicationAttemptId().getApplicationId()));
}
// Stamp the event with the given time and attach it to the entity.
event.setTimestamp(ts);
entity.addEvent(event);
// Id prefix is the inverted attempt id — presumably so later attempts sort
// ahead in storage order; NOTE(review): confirm against reader expectations.
entity.setIdPrefix(
    TimelineServiceHelper.invertLong(appAttemptID.getAttemptId()));
/**
 * Builds a timeline entity for a task and links it back to its parent job
 * entity. For TASK_STARTED events the task type is additionally recorded in
 * the entity info.
 *
 * @param event history event the entity is derived from.
 * @param timestamp timestamp to stamp on the base entity.
 * @param taskId task id, used as the entity id.
 * @param entityType timeline entity type for the task.
 * @param relatedJobEntity entity type of the parent job.
 * @param jobId id of the job this task belongs to.
 * @param setCreatedTime whether the created time is set on the entity.
 * @param taskIdPrefix id prefix controlling storage order.
 * @return the populated task timeline entity.
 */
private org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity
    createTaskEntity(HistoryEvent event, long timestamp, String taskId,
    String entityType, String relatedJobEntity, JobId jobId,
    boolean setCreatedTime, long taskIdPrefix) {
  org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity
      taskEntity =
          createBaseEntity(event, timestamp, entityType, setCreatedTime);
  taskEntity.setId(taskId);
  taskEntity.setIdPrefix(taskIdPrefix);
  taskEntity.addIsRelatedToEntity(relatedJobEntity, jobId.toString());
  // Only start events carry the task type.
  if (event.getEventType() == EventType.TASK_STARTED) {
    TaskStartedEvent startedEvent = (TaskStartedEvent) event;
    taskEntity.addInfo("TASK_TYPE", startedEvent.getTaskType().toString());
  }
  return taskEntity;
}
// Copy the source event's timestamp onto the timeline event and attach it.
tEvent.setTimestamp(event.getTimestamp());
entity.addEvent(tEvent);
// Id prefix is the inverted container start time; the same prefix must be
// used for every update of this container entity.
entity.setIdPrefix(TimelineServiceHelper.
    invertLong(container.getContainerStartTime()));
// Id prefix is the inverted application attempt id — presumably so later
// attempts sort ahead in storage order; NOTE(review): confirm.
entity.setIdPrefix(
    TimelineServiceHelper.invertLong(attemptId.getAttemptId()));
// Carry the container's diagnostics on the event, then attach it.
event.addInfo(DIAGNOSTICS, container.getDiagnostics());
entity.addEvent(event);
// Id prefix is the inverted container start time; it must match the prefix
// used for every other update of this container entity.
entity.setIdPrefix(TimelineServiceHelper.invertLong(containerStartTime));
// Record the container's resource allocation on the event, then attach it.
event.addInfo("Resources", container.getResource().toString());
entity.addEvent(event);
// Id prefix is the inverted start time; the same prefix must be used for
// every update of this entity.
entity.setIdPrefix(TimelineServiceHelper.invertLong(startTime));
// Rebuild the entity's identity (type, id, id prefix) from the parsed
// storage row key.
entity.setType(parseRowKey.getEntityType());
entity.setId(parseRowKey.getEntityId());
// getEntityIdPrefix() returns a boxed Long; unbox for the setter.
entity.setIdPrefix(parseRowKey.getEntityIdPrefix().longValue());
// Rebuild the entity's identity (type, id, id prefix) from the parsed
// storage row key.
entity.setType(parseRowKey.getEntityType());
entity.setId(parseRowKey.getEntityId());
// getEntityIdPrefix() returns a boxed Long; unbox for the setter.
entity.setIdPrefix(parseRowKey.getEntityIdPrefix().longValue());