Usage examples for org.apache.hadoop.yarn.api.records.timeline.TimelineEntity.getEntityId()
@XmlElement(name = "entity") public String getEntityId()
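Before the full examples below, a minimal sketch of how getEntityId() pairs with its setter; the entity type and id values here are illustrative and not taken from any of the source files:

TimelineEntity entity = new TimelineEntity();
// An entity is identified by its (type, id) pair in the timeline store.
entity.setEntityType("TEZ_DAG_ID");     // illustrative type
entity.setEntityId("dag_1_0001_1");     // illustrative id
entity.setStartTime(System.currentTimeMillis());
String id = entity.getEntityId();       // returns "dag_1_0001_1"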
From source file:org.apache.tez.dag.history.logging.ats.TestHistoryEventTimelineConversion.java
License:Apache License
@Test(timeout = 5000)
public void testConvertVertexFinishedEvent() {
  long initRequestedTime = random.nextLong();
  long initedTime = random.nextLong();
  long startRequestedTime = random.nextLong();
  long startTime = random.nextLong();
  long finishTime = random.nextLong();
  Map<String, Integer> taskStats = new HashMap<String, Integer>();
  taskStats.put("FOO", 100);
  taskStats.put("BAR", 200);
  VertexStats vertexStats = new VertexStats();
  VertexFinishedEvent event = new VertexFinishedEvent(tezVertexID, "v1", 1, initRequestedTime,
      initedTime, startRequestedTime, startTime, finishTime, VertexState.ERROR, "diagnostics",
      null, vertexStats, taskStats);
  TimelineEntity timelineEntity = HistoryEventTimelineConversion.convertToTimelineEntity(event);
  Assert.assertEquals(EntityTypes.TEZ_VERTEX_ID.name(), timelineEntity.getEntityType());
  Assert.assertEquals(tezVertexID.toString(), timelineEntity.getEntityId());
  Assert.assertEquals(0, timelineEntity.getRelatedEntities().size());
  Assert.assertEquals(3, timelineEntity.getPrimaryFilters().size());
  Assert.assertTrue(timelineEntity.getPrimaryFilters().get(ATSConstants.APPLICATION_ID)
      .contains(applicationId.toString()));
  Assert.assertTrue(timelineEntity.getPrimaryFilters().get(EntityTypes.TEZ_DAG_ID.name())
      .contains(tezDAGID.toString()));
  Assert.assertTrue(
      timelineEntity.getPrimaryFilters().get(ATSConstants.STATUS).contains(VertexState.ERROR.name()));
  Assert.assertEquals(1, timelineEntity.getEvents().size());
  TimelineEvent timelineEvent = timelineEntity.getEvents().get(0);
  Assert.assertEquals(HistoryEventType.VERTEX_FINISHED.name(), timelineEvent.getEventType());
  Assert.assertEquals(finishTime, timelineEvent.getTimestamp());
  Assert.assertEquals(finishTime,
      ((Long) timelineEntity.getOtherInfo().get(ATSConstants.FINISH_TIME)).longValue());
  Assert.assertEquals(finishTime - startTime,
      ((Long) timelineEntity.getOtherInfo().get(ATSConstants.TIME_TAKEN)).longValue());
  Assert.assertEquals(VertexState.ERROR.name(), timelineEntity.getOtherInfo().get(ATSConstants.STATUS));
  Assert.assertEquals("diagnostics", timelineEntity.getOtherInfo().get(ATSConstants.DIAGNOSTICS));
  Assert.assertTrue(timelineEntity.getOtherInfo().containsKey(ATSConstants.STATS));
  Assert.assertEquals(100, ((Integer) timelineEntity.getOtherInfo().get("FOO")).intValue());
  Assert.assertEquals(200, ((Integer) timelineEntity.getOtherInfo().get("BAR")).intValue());
}
From source file:org.apache.tez.dag.history.logging.ats.TestHistoryEventTimelineConversion.java
License:Apache License
@Test(timeout = 5000)
public void testConvertTaskStartedEvent() {
  long scheduleTime = random.nextLong();
  long startTime = random.nextLong();
  TaskStartedEvent event = new TaskStartedEvent(tezTaskID, "v1", scheduleTime, startTime);
  TimelineEntity timelineEntity = HistoryEventTimelineConversion.convertToTimelineEntity(event);
  Assert.assertEquals(EntityTypes.TEZ_TASK_ID.name(), timelineEntity.getEntityType());
  Assert.assertEquals(tezTaskID.toString(), timelineEntity.getEntityId());
  Assert.assertEquals(startTime, timelineEntity.getStartTime().longValue());
  Assert.assertEquals(1, timelineEntity.getRelatedEntities().size());
  Assert.assertTrue(timelineEntity.getRelatedEntities().get(EntityTypes.TEZ_VERTEX_ID.name())
      .contains(tezVertexID.toString()));
  Assert.assertEquals(3, timelineEntity.getPrimaryFilters().size());
  Assert.assertTrue(timelineEntity.getPrimaryFilters().get(ATSConstants.APPLICATION_ID)
      .contains(applicationId.toString()));
  Assert.assertTrue(timelineEntity.getPrimaryFilters().get(EntityTypes.TEZ_DAG_ID.name())
      .contains(tezDAGID.toString()));
  Assert.assertTrue(timelineEntity.getPrimaryFilters().get(EntityTypes.TEZ_VERTEX_ID.name())
      .contains(tezVertexID.toString()));
  Assert.assertEquals(1, timelineEntity.getEvents().size());
  TimelineEvent timelineEvent = timelineEntity.getEvents().get(0);
  Assert.assertEquals(HistoryEventType.TASK_STARTED.name(), timelineEvent.getEventType());
  Assert.assertEquals(startTime, timelineEvent.getTimestamp());
  Assert.assertTrue(timelineEntity.getOtherInfo().containsKey(ATSConstants.SCHEDULED_TIME));
  Assert.assertTrue(timelineEntity.getOtherInfo().containsKey(ATSConstants.START_TIME));
  Assert.assertEquals(scheduleTime,
      ((Long) timelineEntity.getOtherInfo().get(ATSConstants.SCHEDULED_TIME)).longValue());
  Assert.assertEquals(startTime,
      ((Long) timelineEntity.getOtherInfo().get(ATSConstants.START_TIME)).longValue());
}
From source file:org.apache.tez.dag.history.logging.ats.TestHistoryEventTimelineConversion.java
License:Apache License
@Test(timeout = 5000)
public void testConvertTaskAttemptStartedEvent() {
  long startTime = random.nextLong();
  TaskAttemptStartedEvent event = new TaskAttemptStartedEvent(tezTaskAttemptID, "v1", startTime,
      containerId, nodeId, "inProgressURL", "logsURL", "nodeHttpAddress");
  TimelineEntity timelineEntity = HistoryEventTimelineConversion.convertToTimelineEntity(event);
  Assert.assertEquals(EntityTypes.TEZ_TASK_ATTEMPT_ID.name(), timelineEntity.getEntityType());
  Assert.assertEquals(tezTaskAttemptID.toString(), timelineEntity.getEntityId());
  Assert.assertEquals(startTime, timelineEntity.getStartTime().longValue());
  Assert.assertEquals(3, timelineEntity.getRelatedEntities().size());
  Assert.assertTrue(
      timelineEntity.getRelatedEntities().get(ATSConstants.NODE_ID).contains(nodeId.toString()));
  Assert.assertTrue(timelineEntity.getRelatedEntities().get(ATSConstants.CONTAINER_ID)
      .contains(containerId.toString()));
  Assert.assertTrue(timelineEntity.getRelatedEntities().get(EntityTypes.TEZ_TASK_ID.name())
      .contains(tezTaskID.toString()));
  Assert.assertEquals(1, timelineEntity.getEvents().size());
  TimelineEvent timelineEvent = timelineEntity.getEvents().get(0);
  Assert.assertEquals(HistoryEventType.TASK_ATTEMPT_STARTED.name(), timelineEvent.getEventType());
  Assert.assertEquals(startTime, timelineEvent.getTimestamp());
  Assert.assertEquals(4, timelineEntity.getPrimaryFilters().size());
  Assert.assertTrue(timelineEntity.getPrimaryFilters().get(ATSConstants.APPLICATION_ID)
      .contains(applicationId.toString()));
  Assert.assertTrue(timelineEntity.getPrimaryFilters().get(EntityTypes.TEZ_DAG_ID.name())
      .contains(tezDAGID.toString()));
  Assert.assertTrue(timelineEntity.getPrimaryFilters().get(EntityTypes.TEZ_VERTEX_ID.name())
      .contains(tezVertexID.toString()));
  Assert.assertTrue(timelineEntity.getPrimaryFilters().get(EntityTypes.TEZ_TASK_ID.name())
      .contains(tezTaskID.toString()));
  Assert.assertTrue(timelineEntity.getOtherInfo().containsKey(ATSConstants.START_TIME));
  Assert.assertEquals("inProgressURL",
      timelineEntity.getOtherInfo().get(ATSConstants.IN_PROGRESS_LOGS_URL));
  Assert.assertEquals("logsURL", timelineEntity.getOtherInfo().get(ATSConstants.COMPLETED_LOGS_URL));
  Assert.assertEquals(nodeId.toString(), timelineEntity.getOtherInfo().get(ATSConstants.NODE_ID));
  Assert.assertEquals(containerId.toString(), timelineEntity.getOtherInfo().get(ATSConstants.CONTAINER_ID));
  Assert.assertEquals("nodeHttpAddress", timelineEntity.getOtherInfo().get(ATSConstants.NODE_HTTP_ADDRESS));
}
From source file:org.apache.tez.dag.history.logging.ats.TestHistoryEventTimelineConversion.java
License:Apache License
@Test(timeout = 5000)
public void testConvertTaskFinishedEvent() {
  String vertexName = "testVertexName";
  long startTime = random.nextLong();
  long finishTime = random.nextLong();
  TaskState state = TaskState.values()[random.nextInt(TaskState.values().length)];
  String diagnostics = "diagnostics message";
  TezCounters counters = new TezCounters();
  TaskFinishedEvent event = new TaskFinishedEvent(tezTaskID, vertexName, startTime, finishTime,
      tezTaskAttemptID, state, diagnostics, counters);
  TimelineEntity timelineEntity = HistoryEventTimelineConversion.convertToTimelineEntity(event);
  Assert.assertEquals(tezTaskID.toString(), timelineEntity.getEntityId());
  Assert.assertEquals(EntityTypes.TEZ_TASK_ID.name(), timelineEntity.getEntityType());
  final Map<String, Set<Object>> primaryFilters = timelineEntity.getPrimaryFilters();
  Assert.assertEquals(4, primaryFilters.size());
  Assert.assertTrue(primaryFilters.get(ATSConstants.APPLICATION_ID).contains(applicationId.toString()));
  Assert.assertTrue(primaryFilters.get(EntityTypes.TEZ_DAG_ID.name()).contains(tezDAGID.toString()));
  Assert.assertTrue(primaryFilters.get(EntityTypes.TEZ_VERTEX_ID.name()).contains(tezVertexID.toString()));
  Assert.assertTrue(primaryFilters.get(ATSConstants.STATUS).contains(state.name()));
  Assert.assertEquals(1, timelineEntity.getEvents().size());
  TimelineEvent evt = timelineEntity.getEvents().get(0);
  Assert.assertEquals(HistoryEventType.TASK_FINISHED.name(), evt.getEventType());
  Assert.assertEquals(finishTime, evt.getTimestamp());
  final Map<String, Object> otherInfo = timelineEntity.getOtherInfo();
  Assert.assertEquals(6, otherInfo.size());
  Assert.assertEquals(finishTime, otherInfo.get(ATSConstants.FINISH_TIME));
  Assert.assertEquals(finishTime - startTime, otherInfo.get(ATSConstants.TIME_TAKEN));
  Assert.assertEquals(state.name(), otherInfo.get(ATSConstants.STATUS));
  Assert.assertEquals(tezTaskAttemptID.toString(), otherInfo.get(ATSConstants.SUCCESSFUL_ATTEMPT_ID));
  Assert.assertEquals(diagnostics, otherInfo.get(ATSConstants.DIAGNOSTICS));
  Assert.assertTrue(otherInfo.containsKey(ATSConstants.COUNTERS));
}
From source file:org.apache.tez.dag.history.logging.ats.TestHistoryEventTimelineConversion.java
License:Apache License
@Test(timeout = 5000)
public void testConvertVertexParallelismUpdatedEvent() {
  TezVertexID vId = tezVertexID;
  Map<String, EdgeManagerPluginDescriptor> edgeMgrs = new HashMap<String, EdgeManagerPluginDescriptor>();
  edgeMgrs.put("a", EdgeManagerPluginDescriptor.create("a.class").setHistoryText("text"));
  VertexParallelismUpdatedEvent event = new VertexParallelismUpdatedEvent(vId, 1, null, edgeMgrs, null, 10);
  TimelineEntity timelineEntity = HistoryEventTimelineConversion.convertToTimelineEntity(event);
  Assert.assertEquals(ATSConstants.TEZ_VERTEX_ID, timelineEntity.getEntityType());
  Assert.assertEquals(vId.toString(), timelineEntity.getEntityId());
  Assert.assertEquals(1, timelineEntity.getEvents().size());
  final Map<String, Set<Object>> primaryFilters = timelineEntity.getPrimaryFilters();
  Assert.assertEquals(2, primaryFilters.size());
  Assert.assertTrue(primaryFilters.get(ATSConstants.APPLICATION_ID).contains(applicationId.toString()));
  Assert.assertTrue(primaryFilters.get(EntityTypes.TEZ_DAG_ID.name()).contains(tezDAGID.toString()));
  TimelineEvent evt = timelineEntity.getEvents().get(0);
  Assert.assertEquals(HistoryEventType.VERTEX_PARALLELISM_UPDATED.name(), evt.getEventType());
  Assert.assertEquals(1, evt.getEventInfo().get(ATSConstants.NUM_TASKS));
  Assert.assertEquals(10, evt.getEventInfo().get(ATSConstants.OLD_NUM_TASKS));
  Assert.assertNotNull(evt.getEventInfo().get(ATSConstants.UPDATED_EDGE_MANAGERS));
  Map<String, Object> updatedEdgeMgrs = (Map<String, Object>) evt.getEventInfo()
      .get(ATSConstants.UPDATED_EDGE_MANAGERS);
  Assert.assertEquals(1, updatedEdgeMgrs.size());
  Assert.assertTrue(updatedEdgeMgrs.containsKey("a"));
  Map<String, Object> updatedEdgeMgr = (Map<String, Object>) updatedEdgeMgrs.get("a");
  Assert.assertEquals("a.class", updatedEdgeMgr.get(DAGUtils.EDGE_MANAGER_CLASS_KEY));
  Assert.assertEquals(1, timelineEntity.getOtherInfo().get(ATSConstants.NUM_TASKS));
}
From source file:org.Microsoft.Telemetry.ServiceInformation.java
/**
 * Receives the TimelineEntity of a Tez job and sends its information as
 * telemetry to Application Insights.
 *
 * @param entity the timeline entity, of type {@link TimelineEntity}
 */
private void Send_Tez_Telemetry(TimelineEntity entity) throws Exception {
  try {
    Map<String, String> properties = new HashMap<String, String>();
    Map<String, Double> metrics = new HashMap<String, Double>();
    EventTelemetry eventtelemetry = new EventTelemetry();
    List<TraceTelemetry> GroupCounters = new ArrayList<TraceTelemetry>();
    String Event_Name = "";
    String job_type = "TEZ";
    String job_status = "";
    String task_id = "";
    String even_name = "";
    long startTime = 0;
    long endTime = 0;
    int timeTaken = 0;

    // Derive an operation id from the entity id prefix (dag/vertex/task).
    if (entity.getEntityId().matches("dag(.*)")) {
      task_id = entity.getEntityId().replaceFirst("dag", "Application");
    } else if (entity.getEntityId().matches("vertex(.*)")) {
      task_id = entity.getEntityId().replaceFirst("vertex", "task");
    } else if (entity.getEntityId().matches("task(.*)")) {
      task_id = entity.getEntityId().replaceFirst("task", "Application");
    }

    if (entity.getEvents() != null) {
      List<TimelineEvent> events = entity.getEvents();
      for (TimelineEvent event : events) {
        Event_Name = event.getEventType();
        LOG.info(PATTERN_LOG_INFO + String.format(" Event Type %s Entity ID :%s Entity Type : %s :",
            event.getEventType(), entity.getEntityId(), entity.getEntityType()));
      }
    } else {
      LOG.info(PATTERN_LOG_INFO + "No information about the event");
    }

    if (entity.getOtherInfo().get("config") != null) {
      Map<String, String> config = (LinkedHashMap) entity.getOtherInfo().get("config");
      if (config != null) {
        String value = "";
        String value_from_config = null;
        for (Map.Entry<String, String> entrySet : dimension_from_config.entrySet()) {
          value = entrySet.getValue();
          value_from_config = config.get(value);
          if (value_from_config != null) {
            dimension_to_sending.put(value, value_from_config);
          } else {
            dimension_to_sending.put(value, "not found");
          }
        }
      }
    }

    //|| Event_Name.equals("TASK_STARTED") || Event_Name.equals("TASK_FINISHED")
    if (Event_Name.equals("DAG_STARTED") || Event_Name.equals("DAG_FINISHED")
        || Event_Name.equals("VERTEX_STARTED") || Event_Name.equals("VERTEX_FINISHED")) {

      if (Event_Name.equals("DAG_STARTED")) {
        job_id = entity.getEntityId().replaceFirst("dag", "Application");
        String[] arr = job_id.split("_");
        if (arr.length >= 3) {
          job_id = String.format("%s_%s_%s", arr[0], arr[1], arr[2]);
        }
      }

      if (entity.getOtherInfo() != null) {
        Map<String, Object> OtherInfo = entity.getOtherInfo();
        Set set = entity.getOtherInfo().entrySet();
        if (set.size() > 0) {
          WriteToFileLog("Start List of Other Info of ( ", String.format(
              "Entity ID :%s , Entity Type : %s )\n\n", entity.getEntityId(), entity.getEntityType()));
        }
        Iterator iter = set.iterator();
        while (iter.hasNext()) {
          // Display elements
          Map.Entry me = (Map.Entry) iter.next();
          switch ((String) me.getKey()) {
          case "status":
            if (Event_Name.equals("DAG_STARTED") || Event_Name.equals("DAG_FINISHED")
                || Event_Name.equals("VERTEX_STARTED") || Event_Name.equals("VERTEX_FINISHED")
                || Event_Name.equals("TASK_STARTED") || Event_Name.equals("TASK_FINISHED")) {
              job_status = (String) me.getValue();
              if (Event_Name.equals("DAG_FINISHED") || Event_Name.equals("TASK_FINISHED")
                  || Event_Name.equals("VERTEX_FINISHED")) {
                properties.put("job_status", job_status);
              }
            }
            break;
          case "endTime":
          case "startTime":
          case "timeTaken":
            if (((String) me.getKey()).equals("startTime") && (!Event_Name.equals("DAG_FINISHED"))) {
              startTime = (long) me.getValue();
              eventtelemetry.setTimestamp(new Date(startTime));
            } else if (((String) me.getKey()).equals("endTime")) {
              endTime = (long) me.getValue();
              eventtelemetry.setTimestamp(new Date(endTime));
            } else if (((String) me.getKey()).equals("timeTaken")) {
              timeTaken = (int) me.getValue();
              double result = (double) timeTaken / 1000;
              metrics.put("Duration", result);
            }
            break;
          case "stats":
            if (me.getValue() instanceof LinkedHashMap) {
              LOG.info(PATTERN_LOG_INFO
                  + String.format("the value of stats : %s", me.getValue().getClass().getName()));
              // printLinkedHashMap(value);
              Map<Object, Object> linkedHashMap = (LinkedHashMap) me.getValue();
              for (Map.Entry<Object, Object> entrySet : linkedHashMap.entrySet()) {
                Object key = entrySet.getKey();
                Object value1 = entrySet.getValue();
                LOG.info(PATTERN_LOG_INFO + String.format(" key type %s : value type %s",
                    (String) key, value1.getClass().getName()));
                if (value1 instanceof Integer) {
                  LOG.info(PATTERN_LOG_INFO
                      + String.format(" key(Integer) %s : value %d", (String) key, (int) value1));
                } else if (value1 instanceof Double) {
                  LOG.info(PATTERN_LOG_INFO + String.format(" key(Double) %s : value %s",
                      (String) key, Double.toString((double) value1)));
                } else if (value1 instanceof Long) {
                  LOG.info(PATTERN_LOG_INFO + String.format(" key(Long) %s : value %s",
                      (String) key, CastFromTimeToStringFormat((long) value1)));
                }
                if (value1 instanceof ArrayList) {
                  if (!((ArrayList) value1).isEmpty()) {
                    int i = 1;
                    Object[] ob = ((ArrayList) value1).toArray();
                    for (Object part : ob) {
                      if (part instanceof String) {
                        LOG.info(PATTERN_LOG_INFO + String.format(
                            " the part %d of ArrayList type : %s", i++, (String) part));
                      }
                    }
                  }
                }
              }
            }
            break;
          case "counters":
            if (me.getValue() instanceof LinkedHashMap) {
              Map<Object, Object> Map = (LinkedHashMap) me.getValue();
              Object key;
              Object value1;
              for (Map.Entry<Object, Object> entrySet : Map.entrySet()) {
                key = entrySet.getKey();
                value1 = entrySet.getValue();
                if (value1 instanceof ArrayList) {
                  if (!((ArrayList) value1).isEmpty()) {
                    Object[] ob = ((ArrayList) value1).toArray();
                    for (Object part : ob) {
                      if (part instanceof LinkedHashMap) {
                        Map<Object, Object> linkedHashMap = (LinkedHashMap) part;
                        TraceTelemetry tracetelemetry = new TraceTelemetry(
                            (String) linkedHashMap.get("counterGroupDisplayName"),
                            SeverityLevel.Information);
                        tracetelemetry.getContext().getOperation().setId(task_id);
                        for (Map.Entry<Object, Object> entrySetnext : linkedHashMap.entrySet()) {
                          Object key2 = entrySetnext.getKey();
                          Object value2 = entrySetnext.getValue();
                          if ((key2 instanceof String) && (value2 instanceof ArrayList)) {
                            Object[] ob1 = ((ArrayList) value2).toArray();
                            for (Object part1 : ob1) {
                              if (part1 instanceof LinkedHashMap) {
                                Map<Object, Object> linkedHashMap2 = (LinkedHashMap) part1;
                                if (linkedHashMap2.get("counterValue") instanceof Integer) {
                                  tracetelemetry.getProperties().put(
                                      (String) linkedHashMap2.get("counterName"),
                                      Integer.toString((int) linkedHashMap2.get("counterValue")));
                                  switch ((String) linkedHashMap2.get("counterName")) {
                                  case "NUM_SUCCEEDED_TASKS":
                                  case "TOTAL_LAUNCHED_TASKS":
                                    metrics.put((String) linkedHashMap2.get("counterName"),
                                        ((Integer) linkedHashMap2.get("counterValue")).doubleValue());
                                    break;
                                  // case "FILE_BYTES_READ":
                                  case "WASB_BYTES_READ":
                                  case "HDFS_BYTES_READ":
                                    metrics.put("BYTES READ",
                                        ((Integer) linkedHashMap2.get("counterValue")).doubleValue());
                                    break;
                                  //case "FILE_BYTES_WRITTEN":
                                  case "WASB_BYTES_WRITTEN":
                                  case "HDFS_BYTES_WRITTEN":
                                    metrics.put("BYTES WRITTEN",
                                        ((Integer) linkedHashMap2.get("counterValue")).doubleValue());
                                    break;
                                  case "INPUT_RECORDS_PROCESSED":
                                    metrics.put("RECORDS READ",
                                        ((Integer) linkedHashMap2.get("counterValue")).doubleValue());
                                    break;
                                  case "OUTPUT_RECORDS":
                                    metrics.put("RECORDS WRITTEN",
                                        ((Integer) linkedHashMap2.get("counterValue")).doubleValue());
                                    break;
                                  default:
                                    break;
                                  }
                                } else if (linkedHashMap2.get("counterValue") instanceof Long) {
                                  tracetelemetry.getProperties().put(
                                      (String) linkedHashMap2.get("counterName"),
                                      Long.toString((long) linkedHashMap2.get("counterValue")));
                                } else if (linkedHashMap2.get("counterValue") instanceof Double) {
                                  tracetelemetry.getProperties().put(
                                      (String) linkedHashMap2.get("counterName"),
                                      Double.toString((double) linkedHashMap2.get("counterValue")));
                                }
                              }
                            }
                          }
                        }
                        GroupCounters.add(tracetelemetry);
                      }
                    }
                  }
                }
              }
            } else {
              LOG.info(PATTERN_LOG_INFO + String.format(
                  "The data type %s is not supported in this version",
                  me.getValue().getClass().getName()));
            }
            break;
          default:
            break;
          }
        }
      } else {
        LOG.info(PATTERN_LOG_INFO + "No information about the Other Info");
      }

      if (Event_Name.equals("DAG_STARTED") || Event_Name.equals("DAG_FINISHED")
          || Event_Name.equals("VERTEX_STARTED") || Event_Name.equals("VERTEX_FINISHED")
          || Event_Name.equals("TASK_STARTED") || Event_Name.equals("TASK_FINISHED")) {
        switch (Event_Name) {
        case "DAG_STARTED":
          even_name = "job_started";
          break;
        case "DAG_FINISHED":
          even_name = "job_finished";
          for (Map.Entry<String, String> entrySet : dimension_to_sending.entrySet()) {
            properties.put(entrySet.getKey(), entrySet.getValue());
            LOG.info(PATTERN_LOG_INFO + String.format("%s is %s", entrySet.getKey(), entrySet.getValue()));
          }
          dimension_to_sending.clear();
          break;
        case "VERTEX_STARTED":
        case "TASK_STARTED":
          even_name = "task_started";
          break;
        case "VERTEX_FINISHED":
        case "TASK_FINISHED":
          even_name = "task_finished";
          break;
        default:
          break;
        }
        LOG.info(String.format(PATTERN_LOG_INFO + "job_id is %s", job_id));
        LOG.info(String.format(PATTERN_LOG_INFO + "operation_id is %s", task_id));
        LOG.info(String.format(PATTERN_LOG_INFO + "job_type is %s", job_type));
        properties.put("job_id", job_id);
        properties.put("operation_id", task_id);
        properties.put("job_type", job_type);
        for (Map.Entry<String, String> entrySet : dimension_to_sending.entrySet()) {
          properties.put(entrySet.getKey(), entrySet.getValue());
          LOG.info(PATTERN_LOG_INFO + String.format("%s is %s", entrySet.getKey(), entrySet.getValue()));
        }
        eventtelemetry.getMetrics().putAll(metrics);
        eventtelemetry.getProperties().putAll(properties);
        eventtelemetry.setName(even_name);
        eventtelemetry.getContext().getOperation().setId(task_id);
        telemetry.trackEvent(eventtelemetry);
        for (TraceTelemetry GroupCounter : GroupCounters) {
          telemetry.trackTrace(GroupCounter);
        }
        //telemetry.trackEvent(even_name, properties, metrics);
      }
    }
  } catch (Exception e) {
    String message = PATTERN_LOG_ERROR
        + "A problem occurred while sending telemetry to Application Insights";
    LOG.error(message, e);
    throw new Exception(message);
  }
}
From source file:org.Microsoft.Telemetry.ServiceInformation.java
/**
 * Receives the TimelineEntity of a MapReduce job and sends its information
 * as telemetry to Application Insights.
 *
 * @param entity the timeline entity, of type {@link TimelineEntity}
 */
private void Send_Mapreduce_Telemetry(TimelineEntity entity) throws Exception {
  try {
    Map<String, String> properties = new HashMap<String, String>();
    Map<String, Double> metrics = new HashMap<String, Double>();
    EventTelemetry eventtelemetry = new EventTelemetry();
    List<TraceTelemetry> GroupCounters = new ArrayList<TraceTelemetry>();
    List<ExceptionTelemetry> GroupExceptions = new ArrayList<ExceptionTelemetry>();
    String Event_Name = "";
    String job_type = "MAPREDUCE";
    String job_status = "";
    String task_id = "";
    String even_name = "";
    Boolean if_send_event = true;

    // Derive an operation id from the entity id prefix (job/task).
    if (entity.getEntityId().matches("job(.*)")) {
      task_id = entity.getEntityId().replaceFirst("job", "Application");
    } else if (entity.getEntityId().matches("task(.*)")) {
      task_id = entity.getEntityId().replaceFirst("task", "Application");
    }

    if (entity.getEvents() != null) {
      List<TimelineEvent> events = entity.getEvents();
      for (TimelineEvent event : events) {
        Event_Name = event.getEventType();
        LOG.info(PATTERN_LOG_INFO + String.format("Event Type %s Entity ID :%s Entity Type : %s :",
            event.getEventType(), entity.getEntityId(), entity.getEntityType()));
        if (event.getEventInfo() != null
            && (Event_Name.equals("JOB_FAILED") || Event_Name.equals("TASK_FAILED"))) {
          Map<String, Object> eventInfo = event.getEventInfo();
          if (Event_Name.equals("JOB_FAILED") && eventInfo.get("DIAGNOSTICS") != null
              && (((String) eventInfo.get("DIAGNOSTICS")).equals("") == false)) {
            LOG.info(PATTERN_LOG_INFO + "added diagnostics to tracetelemetry of job failed");
            String message = (String) eventInfo.get("DIAGNOSTICS");
            Exception e = new Exception(message);
            ExceptionTelemetry traceexception = new ExceptionTelemetry(e);
            traceexception.setSeverityLevel(SeverityLevel.Error);
            traceexception.getContext().getOperation().setId(task_id);
            GroupExceptions.add(traceexception);
          }
          if (Event_Name.equals("TASK_FAILED") && eventInfo.get("ERROR") != null
              && (((String) eventInfo.get("ERROR")).equals("") == false)) {
            LOG.info(PATTERN_LOG_INFO + "added error to tracetelemetry of task failed");
            String message = (String) eventInfo.get("ERROR");
            Exception e = new Exception(message);
            ExceptionTelemetry traceexception1 = new ExceptionTelemetry(e);
            ExceptionTelemetry traceexception2 = new ExceptionTelemetry(e);
            traceexception1.setSeverityLevel(SeverityLevel.Error);
            traceexception2.setSeverityLevel(SeverityLevel.Error);
            traceexception2.getContext().getOperation().setId(task_id);
            traceexception1.getContext().getOperation().setId(job_id);
            GroupExceptions.add(traceexception1);
            GroupExceptions.add(traceexception2);
          }
        }
      }
    } else {
      LOG.info(PATTERN_LOG_INFO + "No information about the event");
    }

    if (Event_Name.equals("JOB_FAILED") || Event_Name.equals("JOB_FINISHED")
        || Event_Name.equals("JOB_SUBMITTED") || Event_Name.equals("TASK_STARTED")
        || Event_Name.equals("TASK_FINISHED") || Event_Name.equals("TASK_FAILED")) {

      if (Event_Name.equals("JOB_SUBMITTED")) {
        job_id = entity.getEntityId().replaceFirst("job", "Application");
        dimension_to_sending.clear();
        if (dimension_from_config.size() > 0) {
          long startTime = System.currentTimeMillis();
          dimension_to_sending = get_properties_values(dimension_from_config, job_id);
          long endTime = System.currentTimeMillis();
          long duration = (endTime - startTime);
          LOG.info(PATTERN_LOG_INFO + String.format(
              "The duration of the HTTP request for information from the server is %s (ms)",
              Long.toString(duration)));
        }
      }

      if (entity.getEvents() != null) {
        List<TimelineEvent> events = entity.getEvents();
        for (TimelineEvent event : events) {
          Set set = event.getEventInfo().entrySet();
          if (set.size() > 0) {
            WriteToFileLog("Start List of Other Info of ( ", String.format(
                "Entity ID :%s , Entity Type : %s )\n\n", entity.getEntityId(), entity.getEntityType()));
          }
          Iterator iter = set.iterator();
          while (iter.hasNext()) {
            // Display elements
            Map.Entry me = (Map.Entry) iter.next();
            switch ((String) me.getKey()) {
            case "TASK_TYPE":
              if (Event_Name.equals("TASK_STARTED") || Event_Name.equals("TASK_FINISHED")
                  || Event_Name.equals("TASK_FAILED")) {
                properties.put("task_type", (String) me.getValue());
              }
              break;
            case "STATUS":
            case "JOB_STATUS":
              if (Event_Name.equals("TASK_FAILED") || Event_Name.equals("JOB_FAILED")
                  || Event_Name.equals("JOB_FINISHED") || Event_Name.equals("JOB_SUBMITTED")
                  || Event_Name.equals("TASK_STARTED") || Event_Name.equals("TASK_FINISHED")) {
                job_status = (String) me.getValue();
                if (Event_Name.equals("TASK_FAILED") || Event_Name.equals("JOB_FAILED")
                    || Event_Name.equals("JOB_FINISHED") || Event_Name.equals("TASK_FINISHED")) {
                  properties.put("job_status", job_status);
                }
              }
              break;
            case "START_TIME":
            case "SUBMIT_TIME":
            case "FINISH_TIME":
              if (Event_Name.equals("TASK_STARTED") && ((String) me.getKey()).equals("START_TIME")) {
                times.put(entity.getEntityId(), (long) me.getValue());
                eventtelemetry.setTimestamp(new Date((long) me.getValue()));
              } else if (Event_Name.equals("JOB_SUBMITTED")
                  && ((String) me.getKey()).equals("SUBMIT_TIME")) {
                times.put(entity.getEntityId(), (long) me.getValue());
                eventtelemetry.setTimestamp(new Date((long) me.getValue()));
              } else if (Event_Name.equals("TASK_FAILED") || Event_Name.equals("JOB_FAILED")
                  || Event_Name.equals("TASK_FINISHED") || Event_Name.equals("JOB_FINISHED")) {
                if (!times.isEmpty()) {
                  if (times.get(entity.getEntityId()) != null) {
                    long start_time = times.get(entity.getEntityId());
                    long end_time = (long) me.getValue();
                    double Duration = get_SecondsDifferent(start_time, end_time);
                    metrics.put("Duration", Duration);
                    times.remove(entity.getEntityId());
                    eventtelemetry.setTimestamp(new Date(end_time));
                  } else {
                    LOG.error(PATTERN_LOG_ERROR + String.format(
                        "start time of task %s not found in map times", entity.getEntityId()));
                    if_send_event = false;
                  }
                } else {
                  LOG.error(PATTERN_LOG_ERROR + String.format(
                      "start time of task %s not found in map times; the map is empty",
                      entity.getEntityId()));
                  if_send_event = false;
                }
              }
              break;
            case "TOTAL_COUNTERS_GROUPS":
            case "COUNTERS_GROUPS":
            case "REDUCE_COUNTERS_GROUPS":
            case "MAP_COUNTERS_GROUPS":
              if (me.getValue() instanceof ArrayList) {
                if (!((ArrayList) me.getValue()).isEmpty()) {
                  Object[] ob = ((ArrayList) me.getValue()).toArray();
                  for (Object part : ob) {
                    if (part instanceof LinkedHashMap) {
                      Map<Object, Object> linkedHashMap = (LinkedHashMap) part;
                      TraceTelemetry tracetelemetry = null;
                      if (((String) me.getKey()).equals("TOTAL_COUNTERS_GROUPS")) {
                        tracetelemetry = new TraceTelemetry(
                            "total job finished : " + (String) linkedHashMap.get("DISPLAY_NAME"),
                            SeverityLevel.Information);
                      } else if (((String) me.getKey()).equals("REDUCE_COUNTERS_GROUPS")) {
                        tracetelemetry = new TraceTelemetry(
                            "reduce job finished : " + (String) linkedHashMap.get("DISPLAY_NAME"),
                            SeverityLevel.Information);
                      } else if (((String) me.getKey()).equals("MAP_COUNTERS_GROUPS")) {
                        tracetelemetry = new TraceTelemetry(
                            "map job finished : " + (String) linkedHashMap.get("DISPLAY_NAME"),
                            SeverityLevel.Information);
                      } else {
                        tracetelemetry = new TraceTelemetry(
                            "task finished : " + (String) linkedHashMap.get("DISPLAY_NAME"),
                            SeverityLevel.Information);
                      }
                      tracetelemetry.getContext().getOperation().setId(task_id);
                      for (Map.Entry<Object, Object> entrySet : linkedHashMap.entrySet()) {
                        Object key = entrySet.getKey();
                        Object value1 = entrySet.getValue();
                        if ((key instanceof String) && (value1 instanceof ArrayList)) {
                          Object[] ob1 = ((ArrayList) value1).toArray();
                          for (Object part1 : ob1) {
                            if (part1 instanceof LinkedHashMap) {
                              Map<Object, Object> linkedHashMap2 = (LinkedHashMap) part1;
                              if (linkedHashMap2.get("VALUE") instanceof Integer) {
                                tracetelemetry.getProperties().put(
                                    (String) linkedHashMap2.get("NAME"),
                                    String.format("%d", (Integer) linkedHashMap2.get("VALUE")));
                                if (((String) me.getKey()).equals("TOTAL_COUNTERS_GROUPS")) {
                                  switch ((String) linkedHashMap2.get("NAME")) {
                                  case "MAP_INPUT_RECORDS":
                                    metrics.put("RECORDS READ",
                                        ((Integer) linkedHashMap2.get("VALUE")).doubleValue());
                                    break;
                                  case "REDUCE_OUTPUT_RECORDS":
                                    metrics.put("RECORDS WRITTEN",
                                        ((Integer) linkedHashMap2.get("VALUE")).doubleValue());
                                    break;
                                  // case "FILE_BYTES_READ":
                                  case "WASB_BYTES_READ":
                                  case "HDFS_BYTES_READ":
                                    metrics.put("BYTES READ",
                                        ((Integer) linkedHashMap2.get("VALUE")).doubleValue());
                                    break;
                                  //case "FILE_BYTES_WRITTEN":
                                  case "WASB_BYTES_WRITTEN":
                                  case "HDFS_BYTES_WRITTEN":
                                    metrics.put("BYTES WRITTEN",
                                        ((Integer) linkedHashMap2.get("VALUE")).doubleValue());
                                    break;
                                  default:
                                    break;
                                  }
                                }
                              }
                            }
                          }
                        } else if ((key instanceof String) && (value1 instanceof String)) {
                          // string values are ignored here
                        } else if ((key instanceof String) && (value1 instanceof Integer)) {
                          // integer values are ignored here
                        }
                      }
                      GroupCounters.add(tracetelemetry);
                    }
                  }
                }
              } else {
                LOG.info(PATTERN_LOG_INFO + String.format(
                    "The data type %s is not supported in this version",
                    me.getValue().getClass().getName()));
              }
              break;
            case "NUM_REDUCES":
            case "NUM_MAPS":
            case "FINISHED_MAPS":
            case "FINISHED_REDUCES":
            case "FAILED_REDUCES":
            case "FAILED_MAPS":
              metrics.put((String) me.getKey(), ((Integer) me.getValue()).doubleValue());
              LOG.info(PATTERN_LOG_INFO + String.format("%s is %s", (String) me.getKey(),
                  ((Integer) me.getValue()).toString()));
              LOG2.info(PATTERN_LOG_INFO + String.format("%s is %s", (String) me.getKey(),
                  ((Integer) me.getValue()).toString()));
              break;
            default:
              break;
            }
          }
        }
      } else {
        LOG.info(PATTERN_LOG_INFO + "No information about the event");
      }

      if (Event_Name.equals("TASK_FAILED") || Event_Name.equals("JOB_FAILED")
          || Event_Name.equals("JOB_FINISHED") || Event_Name.equals("JOB_SUBMITTED")
          || Event_Name.equals("TASK_STARTED") || Event_Name.equals("TASK_FINISHED")) {
        switch (Event_Name) {
        case "JOB_SUBMITTED":
          even_name = "job_started";
          break;
        case "JOB_FINISHED":
        case "JOB_FAILED":
          even_name = "job_finished";
          for (Map.Entry<String, String> entrySet : dimension_to_sending.entrySet()) {
            properties.put(entrySet.getKey(), entrySet.getValue());
            LOG.info(PATTERN_LOG_INFO + String.format("%s is %s", entrySet.getKey(), entrySet.getValue()));
          }
          dimension_to_sending.clear();
          if (metrics.get("NUM_REDUCES") != null && metrics.get("NUM_MAPS") != null
              && metrics.get("FINISHED_MAPS") != null && metrics.get("FINISHED_REDUCES") != null) {
            metrics.put("TOTAL_LAUNCHED_TASKS", metrics.get("NUM_REDUCES") + metrics.get("NUM_MAPS"));
            metrics.put("NUM_SUCCEEDED_TASKS",
                metrics.get("FINISHED_MAPS") + metrics.get("FINISHED_REDUCES"));
            //metrics.put("NUM_SUCCEEDED_TASKS", metrics.get("TOTAL_LAUNCHED_TASKS") - (metrics.get("FAILED_REDUCES") + metrics.get("FAILED_MAPS")));
            LOG.info(PATTERN_LOG_INFO + String.format("TOTAL_LAUNCHED_TASKS is %s",
                metrics.get("TOTAL_LAUNCHED_TASKS").toString()));
            LOG.info(PATTERN_LOG_INFO + String.format("NUM_SUCCEEDED_TASKS is %s",
                metrics.get("NUM_SUCCEEDED_TASKS").toString()));
            LOG2.info(PATTERN_LOG_INFO + String.format("TOTAL_LAUNCHED_TASKS is %s",
                metrics.get("TOTAL_LAUNCHED_TASKS").toString()));
            LOG2.info(PATTERN_LOG_INFO + String.format("NUM_SUCCEEDED_TASKS is %s",
                metrics.get("NUM_SUCCEEDED_TASKS").toString()));
          }
          break;
        case "TASK_STARTED":
          even_name = "task_started";
          break;
        case "TASK_FINISHED":
        case "TASK_FAILED":
          even_name = "task_finished";
          break;
        default:
          break;
        }
        LOG.info(PATTERN_LOG_INFO + String.format("job_id is %s", job_id));
        LOG.info(PATTERN_LOG_INFO + String.format("task_id is %s", task_id));
        LOG.info(PATTERN_LOG_INFO + String.format("job_type is %s", job_type));
        properties.put("job_id", job_id);
        properties.put("task_id", task_id);
        properties.put("job_type", job_type);
        for (Map.Entry<String, String> entrySet : dimension_to_sending.entrySet()) {
          properties.put(entrySet.getKey(), entrySet.getValue());
          LOG.info(PATTERN_LOG_INFO + String.format("%s is %s", entrySet.getKey(), entrySet.getValue()));
        }
        eventtelemetry.getMetrics().putAll(metrics);
        eventtelemetry.getProperties().putAll(properties);
        eventtelemetry.setName(even_name);
        eventtelemetry.getContext().getOperation().setId(task_id);
        if (if_send_event) {
          LOG.info(PATTERN_LOG_INFO + "send telemetry");
          LOG2.info(PATTERN_LOG_INFO + "send telemetry");
          telemetry.trackEvent(eventtelemetry);
          // telemetry.trackEvent(even_name, properties, metrics);
        } else {
          LOG.info(PATTERN_LOG_ERROR + "no telemetry sent");
          LOG2.info(PATTERN_LOG_ERROR + "no telemetry sent");
        }
        if (GroupCounters.size() > 0) {
          LOG.info(PATTERN_LOG_INFO + "send TraceTelemetry");
          LOG2.info(PATTERN_LOG_INFO + "send TraceTelemetry");
          for (TraceTelemetry GroupCounter : GroupCounters) {
            telemetry.trackTrace(GroupCounter);
          }
        } else {
          LOG.info(PATTERN_LOG_ERROR + "no TraceTelemetry sent");
        }
        if (GroupExceptions.size() > 0) {
          LOG.info(PATTERN_LOG_INFO + "send ExceptionTelemetry");
          for (ExceptionTelemetry exception_massage : GroupExceptions) {
            telemetry.trackException(exception_massage);
          }
        } else {
          LOG.info(PATTERN_LOG_ERROR + "no ExceptionTelemetry sent");
        }
      }
    }
  } catch (Exception e) {
    String message = PATTERN_LOG_ERROR
        + "A problem occurred while sending telemetry to Application Insights";
    LOG.error(message, e);
    throw new Exception(message);
  }
}
From source file:org.Microsoft.Telemetry.ServiceInformation.java
private void putToLog(TimelineEntity entity) throws Exception {
  try {
    WriteToFileLog("Entity Type :", entity.getEntityType());
    WriteToFileLog("Entity ID :", entity.getEntityId());
    WriteToFileLog("Domain ID :", entity.getDomainId());
    if (entity.getStartTime() != null) {
      WriteToFileLog("Start Time :", CastFromTimeToStringFormat(entity.getStartTime()));
    }

    // print all events of the entity
    WriteToFileLog("\tprint all events ", "");
    LOG.info(PATTERN_LOG_INFO + " print all events of Entity Name :" + String.format(
        "Entity ID :%s , Entity Type : %s )\n\n", entity.getEntityId(), entity.getEntityType()));
    if (entity.getEvents() != null) {
      List<TimelineEvent> events = entity.getEvents();
      WriteToFileLog("Start List of events of ( ", String.format(
          "Entity ID :%s , Entity Type : %s )\n\n", entity.getEntityId(), entity.getEntityType()));
      for (TimelineEvent event : events) {
        WriteToFileLog("\tEvent Type :", event.getEventType() + ",");
        WriteToFileLog("\tTime Stamp :", CastFromTimeToStringFormat(event.getTimestamp()) + ",");
        if (event.getEventInfo() != null) {
          WriteToFileLog("\tprint all Event Info ", "");
          // Get an iterator
          Set set = event.getEventInfo().entrySet();
          Iterator iter = set.iterator();
          if (set.size() > 0) {
            WriteToFileLog("\t\tStart List Event Info ", "");
          }
          while (iter.hasNext()) {
            // Display elements
            Map.Entry me = (Map.Entry) iter.next();
            // send information to the history file
            WriteObjectToLog("\t\t { Key : " + me.getKey() + ", Value: ", me.getValue());
          }
          if (set.size() > 0) {
            WriteToFileLog("\n\t\tEnd List Event Info ", "");
          }
        }
      }
      WriteToFileLog("End List of events of :", entity.getEntityType() + "\n\n");
    } else {
      LOG.info(PATTERN_LOG_INFO + "variable events of entity is null");
    }

    // print all Related Entities of the entity
    WriteToFileLog("\tprint all Related Entities ", "");
    LOG.info(PATTERN_LOG_INFO + "print all Related Entities of Entity Name :" + String.format(
        "Entity ID :%s , Entity Type : %s )\n\n", entity.getEntityId(), entity.getEntityType()));
    if (entity.getRelatedEntities() != null) {
      //Map<String, Set<String>> RelatedEntities = entity.getRelatedEntities();
      WriteToFileLog("Start List of Related Entities of ( ", String.format(
          "Entity ID :%s , Entity Type : %s )\n\n", entity.getEntityId(), entity.getEntityType()));
      Set set = entity.getRelatedEntities().entrySet();
      Iterator iter = set.iterator();
      if (set.size() > 0) {
        WriteToFileLog("\t\tStart List Related Entities ", "");
      }
      while (iter.hasNext()) {
        // Display elements
        Map.Entry me = (Map.Entry) iter.next();
        // send information to the history file
        Set<String> setcoll = (Set<String>) me.getValue();
        if (setcoll.size() > 0) {
          WriteToFileLog("\t\t { Key: (" + me.getKey() + " ) ", "\n\t\t\t[");
        } else {
          WriteToFileLog("\t\t { Key: (" + me.getKey() + " ) ", "\n\t\t\t Empty Value");
        }
        // over all parts of Set<String>
        for (String str : setcoll) {
          WriteToFileLog("\t\t\t", str + ",");
        }
        WriteToFileLog("\n\t\t\t]", "\n}");
      }
      if (set.size() > 0) {
        WriteToFileLog("\t\tEnd List Related Entities ", "");
      }
      WriteToFileLog("\t\t}", "");
      WriteToFileLog("End List of Related Entities of :", entity.getEntityType() + "\n\n");
    } else {
      LOG.info(PATTERN_LOG_INFO + "variable Related Entities is null");
    }

    // print all Primary Filters of the entity
    WriteToFileLog("\tprint all Primary Filters ", "");
    LOG.info(PATTERN_LOG_INFO + "print all Primary Filters of Entity Name :" + String.format(
        "Entity ID :%s , Entity Type : %s )\n\n", entity.getEntityId(), entity.getEntityType()));
    if (entity.getPrimaryFilters() != null) {
      //Map<String, Set<Object>> PrimaryFilters = entity.getRelatedEntities();
      WriteToFileLog("Start List of Primary Filters of ( ", String.format(
          "Entity ID :%s , Entity Type : %s )\n\n", entity.getEntityId(), entity.getEntityType()));
      Set set = entity.getPrimaryFilters().entrySet();
      Iterator iter = set.iterator();
      if (set.size() > 0) {
        WriteToFileLog("\t\tStart List Primary Filters ", "");
      }
      while (iter.hasNext()) {
        // Display elements
        Map.Entry me = (Map.Entry) iter.next();
        // send information to the history file
        Set<Object> setcoll = (Set<Object>) me.getValue();
        if (setcoll.size() > 0) {
          WriteToFileLog("\t\t { Key: (" + me.getKey() + " ) ", "\n\t\t\t[");
        } else {
          WriteToFileLog("\t\t { Key: (" + me.getKey() + " ) ", "\n\t\t\t Empty Value");
        }
        // over all parts of Set<Object>
        for (Object obj : setcoll) {
          WriteObjectToLog("", obj);
        }
        WriteToFileLog("\n\t\t\t]", "\n}");
      }
      if (set.size() > 0) {
        WriteToFileLog("\t\tEnd List Related Entities ", "");
      }
      WriteToFileLog("\t\t}", "");
      WriteToFileLog("End List of Primary Filters of :", entity.getEntityType() + "\n\n");
    } else {
      LOG.info(PATTERN_LOG_INFO + "variable Primary Filters is null");
    }

    // print all Other Info of the entity
    WriteToFileLog("\tprint all Other Info ", "");
    LOG.info(PATTERN_LOG_INFO + "print all Other Info of Entity Name :" + String.format(
        "Entity ID :%s , Entity Type : %s )\n\n", entity.getEntityId(), entity.getEntityType()));
    if (entity.getOtherInfo() != null) {
      // Map<String, Object> OtherInfo = entity.getOtherInfo();
      Set set = entity.getOtherInfo().entrySet();
      if (set.size() > 0) {
        WriteToFileLog("Start List of Other Info of ( ", String.format(
            "Entity ID :%s , Entity Type : %s )\n\n", entity.getEntityId(), entity.getEntityType()));
      }
      Iterator iter = set.iterator();
      while (iter.hasNext()) {
        // Display elements
        Map.Entry me = (Map.Entry) iter.next();
        // send information to the history file
        WriteObjectToLog("\t Key : " + me.getKey() + " : Value : ", me.getValue());
      }
      if (set.size() > 0) {
        WriteToFileLog("\nEnd List of Other Info of :", entity.getEntityType() + "\n\n");
      }
    } else {
      LOG.info(PATTERN_LOG_INFO + "variable Other Info is null");
    }

    LOG.info(PATTERN_LOG_INFO + "Finished printing all information of Entity Name :" + String.format(
        "Entity ID :%s , Entity Type : %s )\n\n\n\n", entity.getEntityId(), entity.getEntityType()));
  } catch (Exception e) {
    String message = PATTERN_LOG_ERROR + "A problem occurred while writing the history file";
    LOG.error(message, e);
    throw e;
  } finally {
  }
}