List of usage examples for org.apache.hadoop.yarn.api.records.timeline TimelineEvent getEventInfo
public Map<String, Object> getEventInfo()
From source file: org.apache.tez.dag.history.logging.ats.TestHistoryEventTimelineConversion.java
License: Apache License
@Test(timeout = 5000) public void testConvertVertexParallelismUpdatedEvent() { TezVertexID vId = tezVertexID;// w w w . ja v a 2 s . co m Map<String, EdgeManagerPluginDescriptor> edgeMgrs = new HashMap<String, EdgeManagerPluginDescriptor>(); edgeMgrs.put("a", EdgeManagerPluginDescriptor.create("a.class").setHistoryText("text")); VertexParallelismUpdatedEvent event = new VertexParallelismUpdatedEvent(vId, 1, null, edgeMgrs, null, 10); TimelineEntity timelineEntity = HistoryEventTimelineConversion.convertToTimelineEntity(event); Assert.assertEquals(ATSConstants.TEZ_VERTEX_ID, timelineEntity.getEntityType()); Assert.assertEquals(vId.toString(), timelineEntity.getEntityId()); Assert.assertEquals(1, timelineEntity.getEvents().size()); final Map<String, Set<Object>> primaryFilters = timelineEntity.getPrimaryFilters(); Assert.assertEquals(2, primaryFilters.size()); Assert.assertTrue(primaryFilters.get(ATSConstants.APPLICATION_ID).contains(applicationId.toString())); Assert.assertTrue(primaryFilters.get(EntityTypes.TEZ_DAG_ID.name()).contains(tezDAGID.toString())); TimelineEvent evt = timelineEntity.getEvents().get(0); Assert.assertEquals(HistoryEventType.VERTEX_PARALLELISM_UPDATED.name(), evt.getEventType()); Assert.assertEquals(1, evt.getEventInfo().get(ATSConstants.NUM_TASKS)); Assert.assertEquals(10, evt.getEventInfo().get(ATSConstants.OLD_NUM_TASKS)); Assert.assertNotNull(evt.getEventInfo().get(ATSConstants.UPDATED_EDGE_MANAGERS)); Map<String, Object> updatedEdgeMgrs = (Map<String, Object>) evt.getEventInfo() .get(ATSConstants.UPDATED_EDGE_MANAGERS); Assert.assertEquals(1, updatedEdgeMgrs.size()); Assert.assertTrue(updatedEdgeMgrs.containsKey("a")); Map<String, Object> updatedEdgeMgr = (Map<String, Object>) updatedEdgeMgrs.get("a"); Assert.assertEquals("a.class", updatedEdgeMgr.get(DAGUtils.EDGE_MANAGER_CLASS_KEY)); Assert.assertEquals(1, timelineEntity.getOtherInfo().get(ATSConstants.NUM_TASKS)); }
From source file: org.Microsoft.Telemetry.ServiceInformation.java
/**
 * Translates a MapReduce {@link TimelineEntity} (job- or task-level) into Application
 * Insights telemetry and sends it through the {@code telemetry} client: one
 * {@link EventTelemetry} carrying string properties and numeric metrics, one
 * {@link TraceTelemetry} per counter group, and {@link ExceptionTelemetry} entries built
 * from JOB_FAILED diagnostics / TASK_FAILED errors. Only the six lifecycle event types
 * (JOB_SUBMITTED/FINISHED/FAILED, TASK_STARTED/FINISHED/FAILED) produce telemetry.
 *
 * @param entity timeline entity whose id starts with "job" or "task"
 * @throws Exception if building or sending telemetry fails (cause is logged, not chained)
 */
private void Send_Mapreduce_Telemetry(TimelineEntity entity) throws Exception {
    try {
        // Accumulators for the outgoing telemetry.
        Map<String, String> properties = new HashMap<String, String>();
        Map<String, Double> metrics = new HashMap<String, Double>();
        EventTelemetry eventtelemetry = new EventTelemetry();
        List<TraceTelemetry> GroupCounters = new ArrayList<TraceTelemetry>();
        List<ExceptionTelemetry> GroupExceptions = new ArrayList<ExceptionTelemetry>();
        String Event_Name = "";
        String job_type = "MAPREDUCE";
        String job_status = "";
        String task_id = "";
        String even_name = "";
        Boolean if_send_event = true;
        // Derive the operation id from the entity id by rewriting the
        // "job"/"task" prefix to "Application".
        if (entity.getEntityId().matches("job(.*)")) {
            task_id = entity.getEntityId().replaceFirst("job", "Application");
        } else if (entity.getEntityId().matches("task(.*)")) {
            task_id = entity.getEntityId().replaceFirst("task", "Application");
        }
        // First pass over the events: record the (last seen) event type and collect
        // exception telemetry for failure events.
        if (entity.getEvents() != null) {
            List<TimelineEvent> events = entity.getEvents();
            for (TimelineEvent event : events) {
                Event_Name = event.getEventType();
                LOG.info(PATTERN_LOG_INFO + String.format("Event Type %s Entity ID :%s Entity Type : %s :",
                        event.getEventType(), entity.getEntityId(), entity.getEntityType()));
                if (event.getEventInfo() != null
                        && (Event_Name.equals("JOB_FAILED") || Event_Name.equals("TASK_FAILED"))) {
                    Map<String, Object> eventInfo = event.getEventInfo();
                    // Job failure: wrap the non-empty DIAGNOSTICS text in an exception
                    // scoped to task_id.
                    if (Event_Name.equals("JOB_FAILED") && eventInfo.get("DIAGNOSTICS") != null
                            && (((String) eventInfo.get("DIAGNOSTICS")).equals("") == false)) {
                        LOG.info(PATTERN_LOG_INFO + "added diagnostics to tracetelemetry of job failed");
                        String message = (String) eventInfo.get("DIAGNOSTICS");
                        Exception e = new Exception(message);
                        ExceptionTelemetry traceexception = new ExceptionTelemetry(e);
                        traceexception.setSeverityLevel(SeverityLevel.Error);
                        traceexception.getContext().getOperation().setId(task_id);
                        GroupExceptions.add(traceexception);
                    }
                    // Task failure: send the non-empty ERROR text twice — once scoped to
                    // the job operation id and once to the task operation id.
                    if (Event_Name.equals("TASK_FAILED") && eventInfo.get("ERROR") != null
                            && (((String) eventInfo.get("ERROR")).equals("") == false)) {
                        LOG.info(PATTERN_LOG_INFO + "added error to tracetelemetry of task failed");
                        String message = (String) eventInfo.get("ERROR");
                        Exception e = new Exception(message);
                        ExceptionTelemetry traceexception1 = new ExceptionTelemetry(e);
                        ExceptionTelemetry traceexception2 = new ExceptionTelemetry(e);
                        traceexception1.setSeverityLevel(SeverityLevel.Error);
                        traceexception2.setSeverityLevel(SeverityLevel.Error);
                        traceexception2.getContext().getOperation().setId(task_id);
                        traceexception1.getContext().getOperation().setId(job_id);
                        GroupExceptions.add(traceexception1);
                        GroupExceptions.add(traceexception2);
                    }
                }
            }
        } else {
            LOG.info(PATTERN_LOG_INFO + "No information about the event");
        }
        // NOTE(review): Event_Name here is the type of the LAST event in the list above.
        if (Event_Name.equals("JOB_FAILED") || Event_Name.equals("JOB_FINISHED")
                || Event_Name.equals("JOB_SUBMITTED") || Event_Name.equals("TASK_STARTED")
                || Event_Name.equals("TASK_FINISHED") || Event_Name.equals("TASK_FAILED")) {
            // On job submission: remember the job id and (re)load the configured
            // dimensions for this job via an HTTP request, timing the call.
            if (Event_Name.equals("JOB_SUBMITTED")) {
                job_id = entity.getEntityId().replaceFirst("job", "Application");
                dimension_to_sending.clear();
                if (dimension_from_config.size() > 0) {
                    long startTime = System.currentTimeMillis();
                    dimension_to_sending = get_properties_values(dimension_from_config, job_id);
                    long endTime = System.currentTimeMillis();
                    long duration = (endTime - startTime);
                    LOG.info(PATTERN_LOG_INFO + String.format(
                            "The duration of request http for information from server is %s (ms)",
                            Long.toString(duration)));
                }
            }
            // Second pass: mine every event-info entry for properties, metrics,
            // timestamps/durations and counter-group traces.
            if (entity.getEvents() != null) {
                List<TimelineEvent> events = entity.getEvents();
                for (TimelineEvent event : events) {
                    Set set = event.getEventInfo().entrySet();
                    if (set.size() > 0) {
                        WriteToFileLog("Start List of Other Info of ( ",
                                String.format("Entity ID :%s , Entity Type : %s )\n\n",
                                        entity.getEntityId(), entity.getEntityType()));
                    }
                    Iterator iter = set.iterator();
                    while (iter.hasNext()) {
                        // Dispatch on the event-info key.
                        Map.Entry me = (Map.Entry) iter.next();
                        switch ((String) me.getKey()) {
                        case "TASK_TYPE":
                            if (Event_Name.equals("TASK_STARTED") || Event_Name.equals("TASK_FINISHED")
                                    || Event_Name.equals("TASK_FAILED")) {
                                properties.put("task_type", (String) me.getValue());
                            }
                            break;
                        case "STATUS":
                        case "JOB_STATUS":
                            if (Event_Name.equals("TASK_FAILED") || Event_Name.equals("JOB_FAILED")
                                    || Event_Name.equals("JOB_FINISHED") || Event_Name.equals("JOB_SUBMITTED")
                                    || Event_Name.equals("TASK_STARTED") || Event_Name.equals("TASK_FINISHED")) {
                                job_status = (String) me.getValue();
                                // Only terminal events expose the status as a property.
                                if (Event_Name.equals("TASK_FAILED") || Event_Name.equals("JOB_FAILED")
                                        || Event_Name.equals("JOB_FINISHED")
                                        || Event_Name.equals("TASK_FINISHED")) {
                                    properties.put("job_status", job_status);
                                }
                            }
                            break;
                        case "START_TIME":
                        case "SUBMIT_TIME":
                        case "FINISH_TIME":
                            // Start/submit: cache the timestamp in `times` keyed by entity id.
                            // Finish/fail: compute Duration from the cached start time; if no
                            // start time was cached, suppress sending the event telemetry.
                            if (Event_Name.equals("TASK_STARTED")
                                    && ((String) me.getKey()).equals("START_TIME")) {
                                times.put(entity.getEntityId(), (long) me.getValue());
                                eventtelemetry.setTimestamp(new Date((long) me.getValue()));
                            } else if (Event_Name.equals("JOB_SUBMITTED")
                                    && ((String) me.getKey()).equals("SUBMIT_TIME")) {
                                times.put(entity.getEntityId(), (long) me.getValue());
                                eventtelemetry.setTimestamp(new Date((long) me.getValue()));
                            } else if (Event_Name.equals("TASK_FAILED") || Event_Name.equals("JOB_FAILED")
                                    || Event_Name.equals("TASK_FINISHED")
                                    || Event_Name.equals("JOB_FINISHED")) {
                                if (!times.isEmpty()) {
                                    if (times.get(entity.getEntityId()) != null) {
                                        long start_time = times.get(entity.getEntityId());
                                        long end_time = (long) me.getValue();
                                        double Duration = get_SecondsDifferent(start_time, end_time);
                                        metrics.put("Duration", Duration);
                                        times.remove(entity.getEntityId());
                                        eventtelemetry.setTimestamp(new Date(end_time));
                                    } else {
                                        // NOTE(review): "not founf" typo is in the original log text.
                                        LOG.error(PATTERN_LOG_ERROR + String.format(
                                                "not founf start time of task %s in map times ",
                                                entity.getEntityId()));
                                        if_send_event = false;
                                    }
                                } else {
                                    LOG.error(PATTERN_LOG_ERROR + String.format(
                                            "not founf start time of task %s in map times the map is Empty",
                                            entity.getEntityId()));
                                    if_send_event = false;
                                }
                            }
                            break;
                        case "TOTAL_COUNTERS_GROUPS":
                        case "COUNTERS_GROUPS":
                        case "REDUCE_COUNTERS_GROUPS":
                        case "MAP_COUNTERS_GROUPS":
                            // Counter groups arrive as an ArrayList of LinkedHashMaps; each
                            // group becomes one TraceTelemetry whose properties are the
                            // group's Integer-valued counters.
                            if (me.getValue() instanceof ArrayList) {
                                if (!((ArrayList) me.getValue()).isEmpty()) {
                                    Object[] ob = ((ArrayList) me.getValue()).toArray();
                                    for (Object part : ob) {
                                        if (part instanceof LinkedHashMap) {
                                            Map<Object, Object> linkedHashMap = (LinkedHashMap) part;
                                            TraceTelemetry tracetelemetry = null;
                                            // Trace message prefix depends on which counter group this is.
                                            if (((String) me.getKey()).equals("TOTAL_COUNTERS_GROUPS")) {
                                                tracetelemetry = new TraceTelemetry(
                                                        "total job finished : "
                                                                + (String) linkedHashMap.get("DISPLAY_NAME"),
                                                        SeverityLevel.Information);
                                            } else if (((String) me.getKey())
                                                    .equals("REDUCE_COUNTERS_GROUPS")) {
                                                tracetelemetry = new TraceTelemetry(
                                                        "reduce job finished : "
                                                                + (String) linkedHashMap.get("DISPLAY_NAME"),
                                                        SeverityLevel.Information);
                                            } else if (((String) me.getKey()).equals("MAP_COUNTERS_GROUPS")) {
                                                tracetelemetry = new TraceTelemetry(
                                                        "map job finished : "
                                                                + (String) linkedHashMap.get("DISPLAY_NAME"),
                                                        SeverityLevel.Information);
                                            } else {
                                                tracetelemetry = new TraceTelemetry(
                                                        "task finished : "
                                                                + (String) linkedHashMap.get("DISPLAY_NAME"),
                                                        SeverityLevel.Information);
                                            }
                                            tracetelemetry.getContext().getOperation().setId(task_id);
                                            for (Map.Entry<Object, Object> entrySet : linkedHashMap
                                                    .entrySet()) {
                                                Object key = entrySet.getKey();
                                                Object value1 = entrySet.getValue();
                                                if ((key instanceof String) && (value1 instanceof ArrayList)) {
                                                    Object[] ob1 = ((ArrayList) value1).toArray();
                                                    for (Object part1 : ob1) {
                                                        if (part1 instanceof LinkedHashMap) {
                                                            Map<Object, Object> linkedHashMap2 = (LinkedHashMap) part1;
                                                            if (linkedHashMap2
                                                                    .get("VALUE") instanceof Integer) {
                                                                tracetelemetry.getProperties().put(
                                                                        (String) linkedHashMap2.get("NAME"),
                                                                        String.format("%d",
                                                                                (Integer) linkedHashMap2
                                                                                        .get("VALUE")));
                                                                // Promote selected TOTAL counters to event metrics.
                                                                if (((String) me.getKey())
                                                                        .equals("TOTAL_COUNTERS_GROUPS")) {
                                                                    switch ((String) linkedHashMap2
                                                                            .get("NAME")) {
                                                                    case "MAP_INPUT_RECORDS":
                                                                        metrics.put("RECORDS READ",
                                                                                ((Integer) linkedHashMap2
                                                                                        .get("VALUE"))
                                                                                                .doubleValue());
                                                                        break;
                                                                    case "REDUCE_OUTPUT_RECORDS":
                                                                        metrics.put("RECORDS WRITTEN",
                                                                                ((Integer) linkedHashMap2
                                                                                        .get("VALUE"))
                                                                                                .doubleValue());
                                                                        break;
                                                                    // case "FILE_BYTES_READ":
                                                                    case "WASB_BYTES_READ":
                                                                    case "HDFS_BYTES_READ":
                                                                        metrics.put("BYTES READ",
                                                                                ((Integer) linkedHashMap2
                                                                                        .get("VALUE"))
                                                                                                .doubleValue());
                                                                        break;
                                                                    //case "FILE_BYTES_WRITTEN":
                                                                    case "WASB_BYTES_WRITTEN":
                                                                    case "HDFS_BYTES_WRITTEN":
                                                                        metrics.put("BYTES WRITTEN",
                                                                                ((Integer) linkedHashMap2
                                                                                        .get("VALUE"))
                                                                                                .doubleValue());
                                                                        break;
                                                                    default:
                                                                        break;
                                                                    }
                                                                }
                                                            }
                                                        }
                                                    }
                                                    // NOTE(review): String- and Integer-valued group entries
                                                    // fall through to empty branches and are ignored.
                                                } else if ((key instanceof String)
                                                        && (value1 instanceof String)) {
                                                } else if ((key instanceof String)
                                                        && (value1 instanceof Integer)) {
                                                }
                                            }
                                            GroupCounters.add(tracetelemetry);
                                        }
                                    }
                                }
                            } else {
                                LOG.info(PATTERN_LOG_INFO
                                        + String.format("The data type %s is not supported in this version",
                                                me.getValue().getClass().getName()));
                            }
                            break;
                        case "NUM_REDUCES":
                        case "NUM_MAPS":
                        case "FINISHED_MAPS":
                        case "FINISHED_REDUCES":
                        case "FAILED_REDUCES":
                        case "FAILED_MAPS":
                            // Task-count fields become metrics directly (assumes Integer values).
                            metrics.put((String) me.getKey(), ((Integer) me.getValue()).doubleValue());
                            LOG.info(PATTERN_LOG_INFO + String.format("%s is %s", (String) me.getKey(),
                                    ((Integer) me.getValue()).toString()));
                            LOG2.info(PATTERN_LOG_INFO + String.format("%s is %s", (String) me.getKey(),
                                    ((Integer) me.getValue()).toString()));
                            break;
                        default:
                            break;
                        }
                    }
                }
            } else {
                LOG.info(PATTERN_LOG_INFO + "No information about event ");
            }
            // Finalize: name the event, attach ids/dimensions, then send event,
            // counter traces and exceptions.
            if (Event_Name.equals("TASK_FAILED") || Event_Name.equals("JOB_FAILED")
                    || Event_Name.equals("JOB_FINISHED") || Event_Name.equals("JOB_SUBMITTED")
                    || Event_Name.equals("TASK_STARTED") || Event_Name.equals("TASK_FINISHED")) {
                switch (Event_Name) {
                case "JOB_SUBMITTED":
                    even_name = "job_started";
                    break;
                case "JOB_FINISHED":
                case "JOB_FAILED":
                    even_name = "job_finished";
                    // Attach per-job dimensions fetched at submission, then derive
                    // aggregate task metrics from the collected counts.
                    for (Map.Entry<String, String> entrySet : dimension_to_sending.entrySet()) {
                        properties.put(entrySet.getKey(), entrySet.getValue());
                        LOG.info(PATTERN_LOG_INFO
                                + String.format("%s is %s", entrySet.getKey(), entrySet.getValue()));
                    }
                    dimension_to_sending.clear();
                    if (metrics.get("NUM_REDUCES") != null && metrics.get("NUM_MAPS") != null
                            && metrics.get("FINISHED_MAPS") != null
                            && metrics.get("FINISHED_REDUCES") != null) {
                        metrics.put("TOTAL_LAUNCHED_TASKS",
                                metrics.get("NUM_REDUCES") + metrics.get("NUM_MAPS"));
                        metrics.put("NUM_SUCCEEDED_TASKS",
                                metrics.get("FINISHED_MAPS") + metrics.get("FINISHED_REDUCES"));
                        //metrics.put("NUM_SUCCEEDED_TASKS", metrics.get("TOTAL_LAUNCHED_TASKS") - (metrics.get("FAILED_REDUCES") + metrics.get("FAILED_MAPS")));
                        LOG.info(PATTERN_LOG_INFO + String.format("TOTAL_LAUNCHED_TASKS is %s",
                                metrics.get("TOTAL_LAUNCHED_TASKS").toString()));
                        LOG.info(PATTERN_LOG_INFO + String.format("NUM_SUCCEEDED_TASKS is %s",
                                metrics.get("NUM_SUCCEEDED_TASKS").toString()));
                        LOG2.info(PATTERN_LOG_INFO + String.format("TOTAL_LAUNCHED_TASKS is %s",
                                metrics.get("TOTAL_LAUNCHED_TASKS").toString()));
                        LOG2.info(PATTERN_LOG_INFO + String.format("NUM_SUCCEEDED_TASKS is %s",
                                metrics.get("NUM_SUCCEEDED_TASKS").toString()));
                    }
                    break;
                case "TASK_STARTED":
                    even_name = "task_started";
                    break;
                case "TASK_FINISHED":
                case "TASK_FAILED":
                    even_name = "task_finished";
                    break;
                default:
                    break;
                }
                LOG.info(PATTERN_LOG_INFO + String.format("job_id is %s", job_id));
                LOG.info(PATTERN_LOG_INFO + String.format("task_id is %s", task_id));
                LOG.info(PATTERN_LOG_INFO + String.format("job_type is %s", job_type));
                properties.put("job_id", job_id);
                properties.put("task_id", task_id);
                properties.put("job_type", job_type);
                for (Map.Entry<String, String> entrySet : dimension_to_sending.entrySet()) {
                    properties.put(entrySet.getKey(), entrySet.getValue());
                    LOG.info(PATTERN_LOG_INFO
                            + String.format("%s is %s", entrySet.getKey(), entrySet.getValue()));
                }
                eventtelemetry.getMetrics().putAll(metrics);
                eventtelemetry.getProperties().putAll(properties);
                eventtelemetry.setName(even_name);
                eventtelemetry.getContext().getOperation().setId(task_id);
                // Send the event unless a missing start time invalidated the duration.
                if (if_send_event) {
                    LOG.info(PATTERN_LOG_INFO + "send telemtry");
                    LOG2.info(PATTERN_LOG_INFO + "send telemtry");
                    telemetry.trackEvent(eventtelemetry);
                    // telemetry.trackEvent(even_name, properties, metrics);
                } else {
                    LOG.info(PATTERN_LOG_ERROR + "nothing telemtry");
                    LOG2.info(PATTERN_LOG_ERROR + "nothing telemtry");
                }
                if (GroupCounters.size() > 0) {
                    LOG.info(PATTERN_LOG_INFO + "send TraceTelemetry");
                    LOG2.info(PATTERN_LOG_INFO + "send TraceTelemetry");
                    for (TraceTelemetry GroupCounter : GroupCounters) {
                        telemetry.trackTrace(GroupCounter);
                    }
                } else {
                    LOG.info(PATTERN_LOG_ERROR + "nothing TraceTelemetry");
                }
                if (GroupExceptions.size() > 0) {
                    LOG.info(PATTERN_LOG_INFO + "send ExceptionTelemetry");
                    for (ExceptionTelemetry exception_massage : GroupExceptions) {
                        telemetry.trackException(exception_massage);
                    }
                } else {
                    LOG.info(PATTERN_LOG_ERROR + "nothing ExceptionTelemetry");
                }
            }
        }
    } catch (Exception e) {
        // NOTE(review): the original cause is logged but not chained into the rethrown
        // exception; "Applocation" typo is in the original message text.
        String message = PATTERN_LOG_ERROR
                + "Creating a problem while send telemetry to Applocation insights......";
        LOG.error(message, e);
        throw new Exception(message);
    }
}
From source file: org.Microsoft.Telemetry.ServiceInformation.java
/**
 * Writes a human-readable dump of a {@link TimelineEntity} — its identity, events
 * (with per-event info), related entities, primary filters, and other info — to the
 * history log file via {@code WriteToFileLog}/{@code WriteObjectToLog}, mirroring
 * section markers to {@code LOG}.
 *
 * @param entity the timeline entity to dump
 * @throws Exception rethrown after logging if writing the history file fails
 */
private void putToLog(TimelineEntity entity) throws Exception {
    try {
        // Entity identity header.
        WriteToFileLog("Entity Type :", entity.getEntityType());
        WriteToFileLog("Entity ID :", entity.getEntityId());
        WriteToFileLog("Domain ID :", entity.getDomainId());
        if (entity.getStartTime() != null) {
            WriteToFileLog("Start Time :", CastFromTimeToStringFormat(entity.getStartTime()));
        }
        // print all events of Entity
        WriteToFileLog("\tprint all events ", "");
        LOG.info(PATTERN_LOG_INFO + " print all events of Entity Name :" + String.format(
                "Entity ID :%s , Entity Type : %s )\n\n", entity.getEntityId(), entity.getEntityType()));
        if (entity.getEvents() != null) {
            List<TimelineEvent> events = entity.getEvents();
            WriteToFileLog("Start List of events of ( ", String.format(
                    "Entity ID :%s , Entity Type : %s )\n\n", entity.getEntityId(),
                    entity.getEntityType()));
            for (TimelineEvent event : events) {
                WriteToFileLog("\tEvent Type :", event.getEventType() + ",");
                WriteToFileLog("\tTime Stamp :", CastFromTimeToStringFormat(event.getTimestamp()) + ",");
                if (event.getEventInfo() != null) {
                    WriteToFileLog("\tprint all Event Info ", "");
                    // Get an iterator over the event-info entries.
                    Set set = event.getEventInfo().entrySet();
                    Iterator iter = set.iterator();
                    if (set.size() > 0) {
                        WriteToFileLog("\t\tStart List Event Info ", "");
                    }
                    while (iter.hasNext()) {
                        // Display elements: one key/value pair per history-file line.
                        Map.Entry me = (Map.Entry) iter.next();
                        WriteObjectToLog("\t\t { Key : " + me.getKey() + ", Value: ", me.getValue());
                    }
                    if (set.size() > 0) {
                        WriteToFileLog("\n\t\tEnd List Event Info ", "");
                    }
                }
            }
            WriteToFileLog("End List of events of :", entity.getEntityType() + "\n\n");
        } else {
            LOG.info(PATTERN_LOG_INFO + "variable events of entity is null ");
        }
        // print all Related Entities of Entity
        WriteToFileLog("\tprint all Related Entities ", "");
        LOG.info(PATTERN_LOG_INFO + "print all Related Entities of Entity Name :" + String.format(
                "Entity ID :%s , Entity Type : %s )\n\n", entity.getEntityId(), entity.getEntityType()));
        if (entity.getRelatedEntities() != null) {
            //Map<String, Set<String>> RelatedEntities = entity.getRelatedEntities();
            WriteToFileLog("Start List of Related Entities of ( ", String.format(
                    "Entity ID :%s , Entity Type : %s )\n\n", entity.getEntityId(),
                    entity.getEntityType()));
            Set set = entity.getRelatedEntities().entrySet();
            Iterator iter = set.iterator();
            if (set.size() > 0) {
                WriteToFileLog("\t\tStart List Related Entities ", "");
            }
            while (iter.hasNext()) {
                // Each entry maps a key to a set of related-entity ids.
                Map.Entry me = (Map.Entry) iter.next();
                Set<String> setcoll = (Set<String>) me.getValue();
                if (setcoll.size() > 0) {
                    WriteToFileLog("\t\t { Key: (" + me.getKey() + " ) ", "\n\t\t\t[");
                } else {
                    WriteToFileLog("\t\t { Key: (" + me.getKey() + " ) ", "\n\t\t\t Empty Value");
                }
                // over all parts of set<string>
                for (String str : setcoll) {
                    WriteToFileLog("\t\t\t", str + ",");
                }
                WriteToFileLog("\n\t\t\t]", "\n}");
            }
            if (set.size() > 0) {
                WriteToFileLog("\t\tEnd List Related Entities ", "");
            }
            WriteToFileLog("\t\t}", "");
            WriteToFileLog("End List of Related Entities of :", entity.getEntityType() + "\n\n");
        } else {
            LOG.info(PATTERN_LOG_INFO + "variable Related Entities is null ");
        }
        // print all Primary Filters of Entity
        WriteToFileLog("\tprint all Primary Filters ", "");
        LOG.info(PATTERN_LOG_INFO + "print all Primary Filters of Entity Name :" + String.format(
                "Entity ID :%s , Entity Type : %s )\n\n", entity.getEntityId(), entity.getEntityType()));
        if (entity.getPrimaryFilters() != null) {
            //Map<String, Set<Object>> PrimaryFilters = entity.getRelatedEntities();
            WriteToFileLog("Start List of Primary Filters of ( ", String.format(
                    "Entity ID :%s , Entity Type : %s )\n\n", entity.getEntityId(),
                    entity.getEntityType()));
            Set set = entity.getPrimaryFilters().entrySet();
            Iterator iter = set.iterator();
            if (set.size() > 0) {
                WriteToFileLog("\t\tStart List Primary Filters ", "");
            }
            while (iter.hasNext()) {
                // Each entry maps a filter key to a set of filter values.
                Map.Entry me = (Map.Entry) iter.next();
                Set<Object> setcoll = (Set<Object>) me.getValue();
                if (setcoll.size() > 0) {
                    WriteToFileLog("\t\t { Key: (" + me.getKey() + " ) ", "\n\t\t\t[");
                } else {
                    WriteToFileLog("\t\t { Key: (" + me.getKey() + " ) ", "\n\t\t\t Empty Value");
                }
                // over all parts of set<Object>
                for (Object obj : setcoll) {
                    WriteObjectToLog("", obj);
                }
                WriteToFileLog("\n\t\t\t]", "\n}");
            }
            if (set.size() > 0) {
                // NOTE(review): label says "Related Entities" in the original output text; kept as-is.
                WriteToFileLog("\t\tEnd List Related Entities ", "");
            }
            WriteToFileLog("\t\t}", "");
            WriteToFileLog("End List of Primary Filters of :", entity.getEntityType() + "\n\n");
        } else {
            LOG.info(PATTERN_LOG_INFO + "variable Primary Filters is null ");
        }
        // print all Other Info of Entity
        WriteToFileLog("\tprint all Other Info ", "");
        LOG.info(PATTERN_LOG_INFO + "print all Other Info of Entity Name :" + String.format(
                "Entity ID :%s , Entity Type : %s )\n\n", entity.getEntityId(), entity.getEntityType()));
        if (entity.getOtherInfo() != null) {
            // Map<String,Object> OtherInfo = entity.getOtherInfo();
            Set set = entity.getOtherInfo().entrySet();
            if (set.size() > 0) {
                WriteToFileLog("Start List of Other Info of ( ",
                        String.format("Entity ID :%s , Entity Type : %s )\n\n", entity.getEntityId(),
                                entity.getEntityType()));
            }
            Iterator iter = set.iterator();
            while (iter.hasNext()) {
                // One key/value pair per history-file line.
                Map.Entry me = (Map.Entry) iter.next();
                WriteObjectToLog("\t Key : " + me.getKey() + " : Value : ", me.getValue());
            }
            if (set.size() > 0) {
                WriteToFileLog("\nEnd List of Other Info of :", entity.getEntityType() + "\n\n");
            }
        } else {
            LOG.info(PATTERN_LOG_INFO + "variable Other Info is null ");
        }
        LOG.info(PATTERN_LOG_INFO + "Finished to print all information of Entity Name :" + String.format(
                "Entity ID :%s , Entity Type : %s )\n\n\n\n", entity.getEntityId(),
                entity.getEntityType()));
    } catch (Exception e) {
        String message = PATTERN_LOG_ERROR + "Creating a problem while writing the history file";
        LOG.error(message, e);
        throw e;
    } finally {
    }
}