Example usage for java.io.IOException.getStackTrace()

Introduction

This page collects example usages of java.io.IOException.getStackTrace() from open-source projects.

Prototype

public StackTraceElement[] getStackTrace() 

Document

Provides programmatic access to the stack trace information printed by printStackTrace(). The method is inherited from java.lang.Throwable, so it is available on IOException and every other exception type.
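
As a quick, self-contained sketch (the class name StackTraceDemo is ours, not taken from any of the projects quoted below), this is the typical way to turn the returned array back into the familiar printStackTrace() text:

import java.io.IOException;

public class StackTraceDemo {
    public static void main(String[] args) {
        try {
            throw new IOException("simulated I/O failure");
        } catch (IOException e) {
            // e.toString() supplies the "java.io.IOException: message" header;
            // each StackTraceElement renders as "declaringClass.method(File.java:line)".
            StringBuilder sb = new StringBuilder(e.toString());
            for (StackTraceElement element : e.getStackTrace()) {
                sb.append("\n\tat ").append(element);
            }
            // The same text printStackTrace() would print, but as a String
            // that can be logged or stored.
            System.err.println(sb);
        }
    }
}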

Usage

From source file:org.openhab.binding.denonmarantz.internal.connector.http.DenonMarantzHttpConnector.java
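
This openHAB connector catches an unexpected RuntimeException in its polling loop, joins the elements of getStackTrace() into a newline-separated string, and logs the result so the scheduled job keeps running.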

/**
 * Set up the connection to the receiver by starting to poll the HTTP API.
 */
@Override
public void connect() {
    if (!isPolling()) {
        logger.debug("HTTP polling started.");
        try {
            setConfigProperties();
        } catch (IOException e) {
            logger.debug("IO error while retrieving document:", e);
            state.connectionError("IO error while connecting to AVR: " + e.getMessage());
            return;
        }

        pollingJob = scheduler.scheduleWithFixedDelay(() -> {
            try {
                refreshHttpProperties();
            } catch (IOException e) {
                logger.debug("IO error while retrieving document: {}", e);
                state.connectionError("IO error while connecting to AVR: " + e.getMessage());
                stopPolling();
            } catch (RuntimeException e) {
                /**
                 * We need to catch this RuntimeException, as otherwise the polling stops.
                 * Log as error as it could be a user configuration error.
                 */
                StringBuilder sb = new StringBuilder();
                for (StackTraceElement s : e.getStackTrace()) {
                    sb.append(s.toString()).append("\n");
                }
                logger.error("Error while polling Http: \"{}\". Stacktrace: \n{}", e.getMessage(),
                        sb.toString());
            }
        }, 0, config.httpPollingInterval, TimeUnit.SECONDS);
    }
}

From source file:org.apache.http.impl.execchain.RetryExec.java
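
Apache HttpClient's RetryExec uses getStackTrace() to transplant a stack trace: when a NoHttpResponseException will not be retried, a new exception with a clearer, host-specific message is created, and setStackTrace(ex.getStackTrace()) preserves where the original failure occurred.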

public CloseableHttpResponse execute(final HttpRoute route, final HttpRequestWrapper request,
        final HttpClientContext context, final HttpExecutionAware execAware) throws IOException, HttpException {
    Args.notNull(route, "HTTP route");
    Args.notNull(request, "HTTP request");
    Args.notNull(context, "HTTP context");
    final Header[] origheaders = request.getAllHeaders();
    for (int execCount = 1;; execCount++) {
        try {
            return this.requestExecutor.execute(route, request, context, execAware);
        } catch (final IOException ex) {
            if (execAware != null && execAware.isAborted()) {
                this.log.debug("Request has been aborted");
                throw ex;
            }
            if (retryHandler.retryRequest(ex, execCount, context)) {
                if (this.log.isInfoEnabled()) {
                    this.log.info("I/O exception (" + ex.getClass().getName()
                            + ") caught when processing request to " + route + ": " + ex.getMessage());
                }
                if (this.log.isDebugEnabled()) {
                    this.log.debug(ex.getMessage(), ex);
                }
                if (!Proxies.isRepeatable(request)) {
                    this.log.debug("Cannot retry non-repeatable request");
                    throw new NonRepeatableRequestException(
                            "Cannot retry request " + "with a non-repeatable request entity", ex);
                }
                request.setHeaders(origheaders);
                if (this.log.isInfoEnabled()) {
                    this.log.info("Retrying request to " + route);
                }
            } else {
                if (ex instanceof NoHttpResponseException) {
                    final NoHttpResponseException updatedex = new NoHttpResponseException(
                            route.getTargetHost().toHostString() + " failed to respond");
                    updatedex.setStackTrace(ex.getStackTrace());
                    throw updatedex;
                } else {
                    throw ex;
                }
            }
        }
    }
}

From source file:com.zinnia.nectar.regression.hadoop.primitive.jobs.SigmaXYJob.java
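
This Hadoop job builds an indented, printStackTrace()-style string from getStackTrace() and logs it at debug level before aborting with a domain-specific exception.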

public Double call() throws NectarException {
    double value = 0;
    JobControl jobControl = new JobControl("sigmajob");
    try {
        job = new Job();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    job.setJarByClass(SigmaXYJob.class);
    log.info("SigmaXY Job initialized");
    log.warn("SigmaXY job: Processing...Do not terminate/close");
    log.debug("SigmaXY job: Mapping process started");

    try {
        ChainMapper.addMapper(job, FieldSeperator.FieldSeperationMapper.class, LongWritable.class, Text.class,
                NullWritable.class, Text.class, job.getConfiguration());
        ChainMapper.addMapper(job, SigmaXYMapper.class, NullWritable.class, Text.class, Text.class,
                DoubleWritable.class, job.getConfiguration());
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    job.getConfiguration().set("fields.spec", x + "," + y);

    job.setReducerClass(DoubleSumReducer.class);
    try {
        FileInputFormat.addInputPath(job, new Path(inputFilePath));
        fs = FileSystem.get(job.getConfiguration());
        if (!fs.exists(new Path(inputFilePath))) {
            throw new NectarException("Exception occured:File " + inputFilePath + " not found ");
        }
    } catch (Exception e) {
        // TODO Auto-generated catch block
        String trace = new String();
        log.error(e.toString());
        for (StackTraceElement s : e.getStackTrace()) {
            trace += "\n\t at " + s.toString();
        }
        log.debug(trace);
        log.debug("SigmaXY Job terminated abruptly\n");
        throw new NectarException();
    }
    FileOutputFormat.setOutputPath(job, new Path(outputFilePath));
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(DoubleWritable.class);
    job.setInputFormatClass(TextInputFormat.class);
    log.debug("SigmaXY job: Mapping process completed");

    log.debug("SigmaXY job: Reducing process started");
    try {
        controlledJob = new ControlledJob(job.getConfiguration());
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    jobControl.addJob(controlledJob);
    Thread thread = new Thread(jobControl);
    thread.start();
    while (!jobControl.allFinished()) {
        try {
            Thread.sleep(10000);
        } catch (InterruptedException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
    jobControl.stop();
    FileSystem fs;
    try {
        fs = FileSystem.get(job.getConfiguration());
        FSDataInputStream in = fs.open(new Path(outputFilePath + "/part-r-00000"));
        BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in));
        String valueLine = bufferedReader.readLine();
        String[] fields = valueLine.split("\t");
        value = Double.parseDouble(fields[1]);
        bufferedReader.close();
        in.close();
    } catch (IOException e) {
        log.error("Exception occured: Output file cannot be read.");
        log.debug(e.getMessage());
        log.debug("SigmaXY Job terminated abruptly\n");
        throw new NectarException();
    }
    log.debug("SigmaXY job: Reducing process completed");
    log.info("SigmaXY Job completed\n");
    return value;
}

From source file:org.apache.http.HC4.impl.execchain.RetryExec.java
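
The same retry logic as in the RetryExec example above, from a repackaged copy of HttpClient; again the original exception's trace is carried over to the replacement NoHttpResponseException.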

@Override
public CloseableHttpResponse execute(final HttpRoute route, final HttpRequestWrapper request,
        final HttpClientContext context, final HttpExecutionAware execAware) throws IOException, HttpException {
    Args.notNull(route, "HTTP route");
    Args.notNull(request, "HTTP request");
    Args.notNull(context, "HTTP context");
    final Header[] origheaders = request.getAllHeaders();
    for (int execCount = 1;; execCount++) {
        try {
            return this.requestExecutor.execute(route, request, context, execAware);
        } catch (final IOException ex) {
            if (execAware != null && execAware.isAborted()) {
                this.log.debug("Request has been aborted");
                throw ex;
            }
            if (retryHandler.retryRequest(ex, execCount, context)) {
                if (this.log.isInfoEnabled()) {
                    this.log.info("I/O exception (" + ex.getClass().getName()
                            + ") caught when processing request to " + route + ": " + ex.getMessage());
                }
                if (this.log.isDebugEnabled()) {
                    this.log.debug(ex.getMessage(), ex);
                }
                if (!RequestEntityProxy.isRepeatable(request)) {
                    this.log.debug("Cannot retry non-repeatable request");
                    throw new NonRepeatableRequestException(
                            "Cannot retry request " + "with a non-repeatable request entity", ex);
                }
                request.setHeaders(origheaders);
                if (this.log.isInfoEnabled()) {
                    this.log.info("Retrying request to " + route);
                }
            } else {
                if (ex instanceof NoHttpResponseException) {
                    final NoHttpResponseException updatedex = new NoHttpResponseException(
                            route.getTargetHost().toHostString() + " failed to respond");
                    updatedex.setStackTrace(ex.getStackTrace());
                    throw updatedex;
                } else {
                    throw ex;
                }
            }
        }
    }
}

From source file:com.zinnia.nectar.regression.hadoop.primitive.jobs.SigmaSqJob.java
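
Structurally identical to the SigmaXYJob example: setup failures are converted into an indented trace string and logged at debug level before the job bails out.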

public Double call() throws NectarException {
    // TODO Auto-generated method stub
    double value = 0;
    JobControl jobControl = new JobControl("sigmajob");
    try {
        job = new Job();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    job.setJarByClass(SigmaSqJob.class);
    log.info("Sigma square Job initialized");
    log.warn("Sigma square job: Processing...Do not terminate/close");
    log.debug("Sigma square job: Mapping process started");

    try {
        ChainMapper.addMapper(job, FieldSeperator.FieldSeperationMapper.class, DoubleWritable.class, Text.class,
                NullWritable.class, Text.class, job.getConfiguration());
        ChainMapper.addMapper(job, SigmaSqMapper.class, NullWritable.class, Text.class, Text.class,
                DoubleWritable.class, job.getConfiguration());
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    job.getConfiguration().set("fields.spec", "" + column);
    job.setReducerClass(DoubleSumReducer.class);
    try {
        FileInputFormat.addInputPath(job, new Path(inputFilePath));
        fs = FileSystem.get(job.getConfiguration());
        if (!fs.exists(new Path(inputFilePath))) {
            throw new NectarException("Exception occured:File " + inputFilePath + " not found ");
        }
    } catch (Exception e) {
        // TODO Auto-generated catch block
        String trace = new String();
        log.error(e.toString());
        for (StackTraceElement s : e.getStackTrace()) {
            trace += "\n\t at " + s.toString();
        }
        log.debug(trace);
        log.debug("Sigma square Job terminated abruptly\n");
        throw new NectarException();
    }
    FileOutputFormat.setOutputPath(job, new Path(outputFilePath));
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(DoubleWritable.class);
    job.setInputFormatClass(TextInputFormat.class);
    log.debug("Sigma square job: Mapping process completed");

    log.debug("Sigma square job: Reducing process started");
    try {
        controlledJob = new ControlledJob(job.getConfiguration());
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    jobControl.addJob(controlledJob);
    Thread thread = new Thread(jobControl);
    thread.start();
    while (!jobControl.allFinished()) {
        try {
            Thread.sleep(10000);
        } catch (InterruptedException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
    jobControl.stop();
    try {
        fs = FileSystem.get(job.getConfiguration());
        FSDataInputStream in = fs.open(new Path(outputFilePath + "/part-r-00000"));
        BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in));
        String valueLine = bufferedReader.readLine();
        String[] fields = valueLine.split("\t");
        value = Double.parseDouble(fields[1]);
        bufferedReader.close();
        in.close();
    } catch (IOException e) {
        log.error("Exception occured: Output file cannot be read.");
        log.debug(e.getMessage());
        log.debug("Sigma square Job terminated abruptly\n");
        throw new NectarException();
    }
    log.debug("Sigma square job: Reducing process completed");
    log.info("Sigma square Job completed\n");
    return value;
}

From source file:com.zinnia.nectar.regression.hadoop.primitive.jobs.MeanJob.java
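
The same pattern once more, here in a job that computes a mean: getStackTrace() feeds a debug log entry before the job is aborted.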

public Double call() throws NectarException {
    double value = 0;
    JobControl jobControl = new JobControl("mean job");
    try {
        job = new Job();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    job.setJarByClass(MeanJob.class);
    log.info("Mean Job initialized");
    log.warn("Mean job: Processing...Do not terminate/close");
    log.debug("Mean job: Mapping process started");

    try {
        ChainMapper.addMapper(job, FieldSeperator.FieldSeperationMapper.class, DoubleWritable.class, Text.class,
                NullWritable.class, Text.class, job.getConfiguration());
        ChainMapper.addMapper(job, MeanMapper.class, NullWritable.class, Text.class, Text.class,
                DoubleWritable.class, job.getConfiguration());
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }

    job.getConfiguration().set("fields.spec", "" + column);
    job.getConfiguration().setInt("n", n);

    job.setReducerClass(DoubleSumReducer.class);
    try {
        FileInputFormat.addInputPath(job, new Path(inputFilePath));
        fs = FileSystem.get(job.getConfiguration());
        if (!fs.exists(new Path(inputFilePath))) {
            throw new NectarException("Exception occured:File " + inputFilePath + " not found ");
        }
    } catch (Exception e) {
        // TODO Auto-generated catch block
        String trace = new String();
        log.error(e.toString());
        for (StackTraceElement s : e.getStackTrace()) {
            trace += "\n\t at " + s.toString();
        }
        log.debug(trace);
        log.debug("Mean Job terminated abruptly\n");
        throw new NectarException();
    }
    FileOutputFormat.setOutputPath(job, new Path(outputFilePath));
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(DoubleWritable.class);
    job.setInputFormatClass(TextInputFormat.class);
    log.debug("Mean job: Mapping process completed");

    log.debug("Mean job: Reducing process started");
    try {
        controlledJob = new ControlledJob(job.getConfiguration());
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    jobControl.addJob(controlledJob);
    Thread thread = new Thread(jobControl);
    thread.start();
    while (!jobControl.allFinished()) {
        try {
            Thread.sleep(10000);
        } catch (InterruptedException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
    jobControl.stop();
    try {
        FSDataInputStream in = fs.open(new Path(outputFilePath + "/part-r-00000"));
        BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in));
        String valueLine = bufferedReader.readLine();
        String[] fields = valueLine.split("\t");
        value = Double.parseDouble(fields[1]);
        bufferedReader.close();
        in.close();
    } catch (IOException e) {
        log.error("Exception occured: Output file cannot be read.");
        log.debug(e.getMessage());
        log.debug("Mean Job terminated abruptly\n");
        throw new NectarException();
    }
    log.debug("Mean job: Reducing process completed");
    log.info("Mean Job completed\n");
    return value;
}

From source file:com.hp.test.framework.model.testcasegen.TestCaseGenerator.java
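
A cautionary example: near the end, e.getStackTrace() is concatenated directly into the log message, which logs the array's default toString() (something like [Ljava.lang.StackTraceElement;@5e91993f) rather than the trace itself; see the inline note.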

/**
 * @param modelxml_filename
 * @param model_gid
 * @throws SQLException
 */
public void CreateTemplatefromModel(String modelxml_filename, int model_gid) throws SQLException {
    ReadXMLFileUsingDom ReadModelXML = new ReadXMLFileUsingDom();

    ReadProperties rp = new ReadProperties();
    int batch_inc = 0;
    File f = new File(modelxml_filename);

    ModelXMLFile = modelxml_filename;
    ModelXMLName = f.getName();
    f = null;
    ModelXMLName1 = ModelXMLName.split("\\.");
    System.out.println(ModelXMLName + ModelXMLName1);
    ModelXMLFile = rp.getProperty("XMLModelLocation") + "\\" + ModelXMLName;
    log.info("Started generating TestScenarios from Model XML file(s) ::> " + ModelXMLFile);

    ReadModelXML.ReadXMLModel(ModelXMLFile);

    List<Map<String, String>> list = new LinkedList<Map<String, String>>();
    combinations(mapmainnode, list);

    int listsize = list.size();
    int i = 0;

    getmodelgid = String.valueOf(model_gid);
    for (Map<String, String> combination : list) {

        //String Template;
        String xml = MaptoXML.toXML(combination, "root");

        //   log.info("Print XML String"+xml);
        //System.out.println( combination );
        Template = getmodelgid + "_" + ModelXMLName1[0] + "Template" + i;
        //log.info("template path"+Template);
        //If modelxmllocation does not exists create it
        //Create Store path if not exists
        CreateDirectory.createdir(rp.getProperty("StorePath"));
        TemplatePath = rp.getProperty("StorePath") + "\\" + Template + ".xml";
        template_list.put(TemplatePath, getmodelgid);
        try {

            BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(
                    new FileOutputStream(rp.getProperty("StorePath") + "\\" + Template + ".xml"), "UTF-8"));
            writer.write(xml + System.getProperty("line.separator"));

            writer.close();

        } catch (IOException e) {
            System.out.println(e);
        }
        i = i + 1;
    }

    mapmainnode.clear();
    list.clear();
    // log.info("mapmainvode and list cleared");

    connection = ConnectDB.ConnectToolDB();
    connection.setAutoCommit(false);
    String query = "INSERT INTO DM_SCENARIO_REF(TEMPLATE_PATH, TEMPLATE_EXTENSION, TEMPLATE_NAME,MODEL_GID_REF,XML_FILE) values (?,?,?,?,?)";
    prestatement = connection.prepareStatement(query);
    batch_inc = 0;
    ConvertFiletoString convertfiletostring = new ConvertFiletoString();
    for (String temp_path : template_list.keySet()) {
        i++;
        try {

            File temp_file = new File(temp_path);
            String xmlString = convertfiletostring.convertFiletoString(temp_path);
            //Unescape XML special characters
            xmlString = StringEscapeUtils.unescapeXml(xmlString);
            prestatement.setString(1, temp_path);
            prestatement.setString(2, ".xml");
            prestatement.setString(3, temp_file.getName());
            temp_file = null;
            prestatement.setString(4, template_list.get(temp_path));
            prestatement.setString(5, xmlString);

            prestatement.addBatch();

        } catch (SQLException e) {

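            // Note: concatenating e.getStackTrace() appends the array's default
            // toString(), not the trace itself; log.error("Exception caught", e)
            // would record the full stack trace.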
            log.error("Exception Caught*******" + e.getStackTrace());

        }
        batch_inc = batch_inc + 1;
        if (batch_inc == 200) {
            prestatement.executeBatch();
            connection.commit();
            log.debug("Saved a batch of Queries");
            connection.setAutoCommit(false);
            batch_inc = 0;
        }

    }

    prestatement.executeBatch();
    connection.commit();
    prestatement.clearBatch();
    log.info("Saved a batch of Queries");
    connection.setAutoCommit(false);
    if (prestatement != null) {
        prestatement.close();
    }
    log.info("Completed generating TestScenarios for the Model Xml file" + modelxml_filename + "  Generated "
            + template_list.size() + " TestScenarios");
    template_list.clear();
    connection.close();
}

From source file:de.decoit.visa.topology.TopologyStorage.java
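
Here each StackTraceElement of a caught IOException is logged on its own debug line, guarded by an isDebugEnabled() check so the loop only runs when debug output is actually wanted.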

/**
 * Layout the topology by using the de.decoit.visa.gridlayout.GridLayout
 * class. By default, the 'neato' executable will be used for layouting. If
 * any nodes with fixed positions are detected, the 'fdp' executable will be
 * used to get better results. All existing switches and VMs will be used as
 * nodes, all cables as edges. Already positioned nodes will not be moved.
 */
public void layoutTopology() {
    try {
        // Layout the subgrids of all component groups

        HashSet<ComponentGroup> processedGroups = new HashSet<>();

        for (Map.Entry<String, ComponentGroup> groupEntry : storage.entrySet()) {
            // Process all groups except the global group 0.0.0.0
            if (!groupEntry.getValue().isGlobalGroup()) {
                // neato is the default layouter
                String command = "neato";

                // Create a new layouter
                GridLayout layout = new GridLayout(groupEntry.getValue().subGridDimensions);

                // Add all components of this group to the layouter
                for (NetworkComponent nc : groupEntry.getValue().componentList) {
                    // Use fdp layouter if there are nodes with fixed
                    // positions
                    if (!layout.addComponent(nc)) {
                        command = "fdp";
                    }
                }

                for (NetworkCable nc : groupEntry.getValue().cables) {
                    layout.addCable(nc);
                }

                for (Map.Entry<String, GroupSwitch> gsEntry : groupEntry.getValue().groupSwitches.entrySet()) {
                    // Use fdp layouter if there are nodes with fixed
                    // positions
                    if (!layout.addGroupSwitch(gsEntry.getValue())) {
                        command = "fdp";
                    }
                }

                // Run the layouter
                layout.run(command);

                // Add the current group to the processed groups set
                processedGroups.add(groupEntry.getValue());
            }
        }

        // Layout the base layer group 0.0.0.0

        // neato is the default layouter
        String command = "neato";

        // Create a new layouter
        GridLayout layout = new GridLayout(TEBackend.getGridDimensions());

        // Add all components to the layouter
        for (NetworkComponent nc : getComponentGroupByName("0.0.0.0").componentList) {
            // Use fdp layouter if there are nodes with fixed positions
            if (!layout.addComponent(nc)) {
                command = "fdp";
            }
        }

        for (NetworkCable nc : getComponentGroupByName("0.0.0.0").cables) {
            if (nc.getLeft().getComponentGroup().equals(nc.getRight().getComponentGroup())) {
                layout.addCable(nc);
            }
        }

        // Add all group objects to the layouter
        for (ComponentGroup cg : processedGroups) {
            // Use fdp layouter if there are nodes with fixed positions
            if (!layout.addComponentGroup(cg)) {
                command = "fdp";
            }
        }

        // Run the layouter
        layout.run(command);
    } catch (IOException ex) {
        StringBuilder sb = new StringBuilder("Caught: [");
        sb.append(ex.getClass().getSimpleName());
        sb.append("] ");
        sb.append(ex.getMessage());
        log.error(sb.toString());

        if (log.isDebugEnabled()) {
            for (StackTraceElement ste : ex.getStackTrace()) {
                log.debug(ste.toString());
            }
        }
    }
}

From source file:org.apache.http.impl.client.DefaultRequestDirector.java
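
An older variant of HttpClient's request retry handling with the same trace-transplanting pattern: the replacement NoHttpResponseException receives the original IOException's stack trace before being thrown.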

/**
 * Execute request and retry in case of a recoverable I/O failure
 */
private HttpResponse tryExecute(final RoutedRequest req, final HttpContext context)
        throws HttpException, IOException {
    final RequestWrapper wrapper = req.getRequest();
    final HttpRoute route = req.getRoute();
    HttpResponse response = null;

    Exception retryReason = null;
    for (;;) {
        // Increment total exec count (with redirects)
        execCount++;
        // Increment exec count for this particular request
        wrapper.incrementExecCount();
        if (!wrapper.isRepeatable()) {
            this.log.debug("Cannot retry non-repeatable request");
            if (retryReason != null) {
                throw new NonRepeatableRequestException(
                        "Cannot retry request " + "with a non-repeatable request entity.  The cause lists the "
                                + "reason the original request failed.",
                        retryReason);
            } else {
                throw new NonRepeatableRequestException(
                        "Cannot retry request " + "with a non-repeatable request entity.");
            }
        }

        try {
            if (!managedConn.isOpen()) {
                // If we have a direct route to the target host
                // just re-open connection and re-try the request
                if (!route.isTunnelled()) {
                    this.log.debug("Reopening the direct connection.");
                    managedConn.open(route, context, params);
                } else {
                    // otherwise give up
                    this.log.debug("Proxied connection. Need to start over.");
                    break;
                }
            }

            if (this.log.isDebugEnabled()) {
                this.log.debug("Attempt " + execCount + " to execute request");
            }
            response = requestExec.execute(wrapper, managedConn, context);
            break;

        } catch (final IOException ex) {
            this.log.debug("Closing the connection.");
            try {
                managedConn.close();
            } catch (final IOException ignore) {
            }
            if (retryHandler.retryRequest(ex, wrapper.getExecCount(), context)) {
                if (this.log.isInfoEnabled()) {
                    this.log.info("I/O exception (" + ex.getClass().getName()
                            + ") caught when processing request to " + route + ": " + ex.getMessage());
                }
                if (this.log.isDebugEnabled()) {
                    this.log.debug(ex.getMessage(), ex);
                }
                if (this.log.isInfoEnabled()) {
                    this.log.info("Retrying request to " + route);
                }
                retryReason = ex;
            } else {
                if (ex instanceof NoHttpResponseException) {
                    final NoHttpResponseException updatedex = new NoHttpResponseException(
                            route.getTargetHost().toHostString() + " failed to respond");
                    updatedex.setStackTrace(ex.getStackTrace());
                    throw updatedex;
                } else {
                    throw ex;
                }
            }
        }
    }
    return response;
}

From source file:nz.dataview.websyncclientgui.WebSYNCClientGUIView.java
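
Another cautionary example: as in the TestCaseGenerator snippet above, e.getStackTrace() is concatenated into a log message and therefore yields the array's toString() rather than a readable trace; see the inline note.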

private void saveConfiguration() {
    WebSYNCClientGUIApp app = WebSYNCClientGUIApp.getApplication();

    try {
        saveConfig();
    } catch (java.io.IOException e) {
        showErrorDialog("Invalid Configurations",
                "Fatal error: could not save configuration file.  Changes were not saved.");
        String level = "WARN";
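        // Note: "+ e.getStackTrace()" appends the array's default toString(),
        // not a readable trace; format the elements or pass the exception to
        // the logger to capture the actual stack trace.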
        String message = "Could not save configuration file: " + e.getMessage() + ", " + e.getStackTrace();
        LogWriter worker = app.logWriteService(isUp, message, level);
        app.getContext().getTaskService().execute(worker);
        return;
    }

    String level = "INFO", message = "Configuration saved via GUI", confirm = "";
    if (requiresRestart) {
        // restart the background service
        ServiceRestarter restarter = app.restartService();
        app.getContext().getTaskService().execute(restarter);

        confirm = ", restart of WebSYNC in progress";
        message += ", restart attempted";
    }

    showNoticeDialog("Configuration saved", "Configurations saved" + confirm);

    configureSaveButton.setEnabled(false);
    advancedSaveButton.setEnabled(false);
    LogWriter worker = app.logWriteService(isUp, message, level);
    app.getContext().getTaskService().execute(worker);
}