Example usage for java.io PrintStream checkError

List of usage examples for java.io PrintStream checkError

Introduction

On this page you can find example usage of java.io PrintStream checkError.

Prototype

public boolean checkError() 

Document

Flushes the stream and checks its error state.
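
PrintStream swallows IOExceptions from its write methods, so callers typically poll checkError() after writing (or between batches of writes) to detect a failed stream such as a closed pipe; this is the pattern the examples below follow. A minimal, self-contained sketch (writeLines and CheckErrorExample are illustrative names, not part of any library):

import java.io.PrintStream;
import java.util.List;

public class CheckErrorExample {
    // Hypothetical helper: writes each line and stops early once the
    // stream has entered its error state (e.g. a broken pipe on stdout).
    static boolean writeLines(PrintStream out, List<String> lines) {
        for (String line : lines) {
            out.println(line);
            // println() never throws IOException; checkError() flushes the
            // stream and reports whether any write so far has failed.
            if (out.checkError()) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        if (!writeLines(System.out, List.of("a", "b", "c"))) {
            System.err.println("stdout is no longer writable");
        }
    }
}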

Usage

From source file: org.apache.orc.tools.FileDump.java

static void printJsonData(final Reader reader) throws IOException, JSONException {
    PrintStream printStream = System.out;
    OutputStreamWriter out = new OutputStreamWriter(printStream, "UTF-8");
    RecordReader rows = reader.rows();
    try {
        TypeDescription schema = reader.getSchema();
        VectorizedRowBatch batch = schema.createRowBatch();
        while (rows.nextBatch(batch)) {
            for (int r = 0; r < batch.size; ++r) {
                JSONWriter writer = new JSONWriter(out);
                printRow(writer, batch, schema, r);
                out.write("\n");
                out.flush();
                if (printStream.checkError()) {
                    throw new IOException("Error encountered when writing to stdout.");
                }
            }
        }
    } finally {
        rows.close();
    }
}

From source file: com.googlecode.android_scripting.jsonrpc.JsonRpcServerTest.java

public void testInvalidHandshake() throws IOException, JSONException, InterruptedException {
    JsonRpcServer server = new JsonRpcServer(null, "foo");
    InetSocketAddress address = server.startLocal(0);
    Socket client = new Socket();
    client.connect(address);
    PrintStream out = new PrintStream(client.getOutputStream());
    out.println(buildRequest(0, "_authenticate", Lists.newArrayList("bar")));
    BufferedReader in = new BufferedReader(new InputStreamReader(client.getInputStream()));
    JSONObject response = new JSONObject(in.readLine());
    Object error = response.get("error");
    assertTrue(JSONObject.NULL != error);
    while (!out.checkError()) {
        out.println(buildRequest(0, "makeToast", Lists.newArrayList("baz")));
    }
    client.close();
    // Further connections should fail.
    client = new Socket();
    try {
        client.connect(address);
        fail();
    } catch (IOException e) {
    }
}

From source file: com.github.lindenb.jvarkit.tools.misc.AlleleFrequencyCalculator.java

@Override
protected Collection<Throwable> call(String inputName) throws Exception {
    PrintStream out = null;
    VcfIterator in = null;
    try {
        final List<String> args = this.getInputFiles();
        if (inputName == null) {
            LOG.info("reading stdin");
            in = new VcfIteratorImpl(stdin());
        } else {
            LOG.info("reading " + args.get(0));
            in = VCFUtils.createVcfIterator(inputName);
        }

        out = openFileOrStdoutAsPrintStream();

        out.println("CHR\tPOS\tID\tREF\tALT\tTOTAL_CNT\tALT_CNT\tFRQ");
        while (in.hasNext() && !out.checkError()) {

            VariantContext ctx = in.next();
            Allele ref = ctx.getReference();
            if (ref == null)
                continue;
            if (ctx.getNSamples() == 0 || ctx.getAlternateAlleles().isEmpty())
                continue;
            Allele alt = ctx.getAltAlleleWithHighestAlleleCount();
            if (alt == null)
                continue;

            GenotypesContext genotypes = ctx.getGenotypes();
            if (genotypes == null)
                continue;
            int total_ctn = 0;
            int alt_ctn = 0;
            for (int i = 0; i < genotypes.size(); ++i) {
                Genotype g = genotypes.get(i);
                for (Allele allele : g.getAlleles()) {
                    if (allele.equals(ref)) {
                        total_ctn++;
                    } else if (allele.equals(alt)) {
                        total_ctn++;
                        alt_ctn++;
                    }
                }

            }

            out.print(ctx.getContig());
            out.print("\t");
            out.print(ctx.getStart());
            out.print("\t");
            out.print(ctx.hasID() ? ctx.getID() : ".");
            out.print("\t");
            out.print(ref.getBaseString());
            out.print("\t");
            out.print(alt.getBaseString());
            out.print("\t");
            out.print(total_ctn);
            out.print("\t");
            out.print(alt_ctn);
            out.print("\t");
            out.print(alt_ctn / (float) total_ctn);
            out.println();
        }
        out.flush();

        return RETURN_OK;
    } catch (Exception err) {
        return wrapException(err);
    } finally {
        CloserUtil.close(out);
        CloserUtil.close(in);
    }
}

From source file: com.intel.ssg.dcst.panthera.cli.PantheraCliDriver.java

int processLocalCmd(String cmd, CommandProcessor proc, CliSessionState ss) {
    int tryCount = 0;
    boolean needRetry;
    int ret = 0;

    do {
        try {
            needRetry = false;
            if (proc != null) {
                if (proc instanceof Driver) {
                    SkinDriver qp = (SkinDriver) proc;
                    PrintStream out = ss.out;
                    long start = System.currentTimeMillis();
                    if (ss.getIsVerbose()) {
                        out.println(cmd);
                    }

                    qp.setTryCount(tryCount);
                    ret = qp.run(cmd).getResponseCode();
                    if (ret != 0) {
                        qp.close();
                        return ret;
                    }

                    ArrayList<String> res = new ArrayList<String>();

                    printHeader(qp, out);

                    int counter = 0;
                    try {
                        while (qp.getResults(res)) {
                            for (String r : res) {
                                out.println(r);
                            }
                            counter += res.size();
                            res.clear();
                            if (out.checkError()) {
                                break;
                            }
                        }
                    } catch (IOException e) {
                        console.printError(
                                "Failed with exception " + e.getClass().getName() + ":" + e.getMessage(),
                                "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
                        ret = 1;
                    }

                    int cret = qp.close();
                    if (ret == 0) {
                        ret = cret;
                    }

                    long end = System.currentTimeMillis();
                    double timeTaken = (end - start) / 1000.0;
                    console.printInfo("Time taken: " + timeTaken + " seconds"
                            + (counter == 0 ? "" : ", Fetched: " + counter + " row(s)"));

                } else {
                    String firstToken = tokenizeCmd(cmd.trim())[0];
                    String cmd_1 = getFirstCmd(cmd.trim(), firstToken.length());

                    if (ss.getIsVerbose()) {
                        ss.out.println(firstToken + " " + cmd_1);
                    }
                    CommandProcessorResponse res = proc.run(cmd_1);
                    if (res.getResponseCode() != 0) {
                        ss.out.println("Query returned non-zero code: " + res.getResponseCode() + ", cause: "
                                + res.getErrorMessage());
                    }
                    ret = res.getResponseCode();
                }
            }
        } catch (CommandNeedRetryException e) {
            console.printInfo("Retry query with a different approach...");
            tryCount++;
            needRetry = true;
        }
    } while (needRetry);

    return ret;
}

From source file: com.github.lindenb.jvarkit.tools.misc.AddLinearIndexToBed.java

protected int doWork(InputStream is, PrintStream out) throws IOException {
    final Pattern tab = Pattern.compile("[\t]");
    BufferedReader in = new BufferedReader(new InputStreamReader(is));
    String line = null;
    while ((line = in.readLine()) != null) {
        if (line.isEmpty() || line.startsWith("#") || line.startsWith("track") || line.startsWith("browser"))
            continue;
        String tokens[] = tab.split(line, 3);
        if (tokens.length < 2) {
            LOG.warn("Bad chrom/pos line:" + line);
            continue;
        }
        SAMSequenceRecord ssr = this.dictionary.getSequence(tokens[0]);
        if (ssr == null) {
            for (SAMSequenceRecord sr2 : this.dictionary.getSequences()) {
                LOG.info("available " + sr2.getSequenceName());
            }
            throw new IOException("undefined chromosome:" + tokens[0]);
        }
        int pos0 = Integer.parseInt(tokens[1]);
        if (pos0 < 0 || pos0 >= ssr.getSequenceLength()) {
            LOG.warn("position is out of range for : " + line + " length(" + tokens[0] + ")="
                    + ssr.getSequenceLength());
        }
        out.print(this.tid2offset[ssr.getSequenceIndex()] + pos0);
        out.print('\t');
        out.print(line);
        out.println();
        if (out.checkError())
            break;
    }
    return 0;
}

From source file: org.apache.hadoop.hive.cli.CliDriver.java

int processLocalCmd(String cmd, CommandProcessor proc, CliSessionState ss) {
    int tryCount = 0;
    boolean needRetry;
    int ret = 0;

    do {
        try {
            needRetry = false;
            if (proc != null) {
                if (proc instanceof Driver) {
                    Driver qp = (Driver) proc;
                    PrintStream out = ss.out;
                    long start = System.currentTimeMillis();
                    if (ss.getIsVerbose()) {
                        out.println(cmd);
                    }

                    qp.setTryCount(tryCount);
                    ret = qp.run(cmd).getResponseCode();
                    if (ret != 0) {
                        qp.close();
                        return ret;
                    }

                    // query has run capture the time
                    long end = System.currentTimeMillis();
                    double timeTaken = (end - start) / 1000.0;

                    ArrayList<String> res = new ArrayList<String>();

                    printHeader(qp, out);

                    // print the results
                    int counter = 0;
                    try {
                        if (out instanceof FetchConverter) {
                            ((FetchConverter) out).fetchStarted();
                        }
                        while (qp.getResults(res)) {
                            for (String r : res) {
                                out.println(r);
                            }
                            counter += res.size();
                            res.clear();
                            if (out.checkError()) {
                                break;
                            }
                        }
                    } catch (IOException e) {
                        console.printError(
                                "Failed with exception " + e.getClass().getName() + ":" + e.getMessage(),
                                "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
                        ret = 1;
                    }

                    int cret = qp.close();
                    if (ret == 0) {
                        ret = cret;
                    }

                    if (out instanceof FetchConverter) {
                        ((FetchConverter) out).fetchFinished();
                    }

                    console.printInfo("Time taken: " + timeTaken + " seconds"
                            + (counter == 0 ? "" : ", Fetched: " + counter + " row(s)"));
                } else {
                    String firstToken = tokenizeCmd(cmd.trim())[0];
                    String cmd_1 = getFirstCmd(cmd.trim(), firstToken.length());

                    if (ss.getIsVerbose()) {
                        ss.out.println(firstToken + " " + cmd_1);
                    }
                    CommandProcessorResponse res = proc.run(cmd_1);
                    if (res.getResponseCode() != 0) {
                        ss.out.println("Query returned non-zero code: " + res.getResponseCode() + ", cause: "
                                + res.getErrorMessage());
                    }
                    if (res.getConsoleMessages() != null) {
                        for (String consoleMsg : res.getConsoleMessages()) {
                            console.printInfo(consoleMsg);
                        }
                    }
                    ret = res.getResponseCode();
                }
            }
        } catch (CommandNeedRetryException e) {
            console.printInfo("Retry query with a different approach...");
            tryCount++;
            needRetry = true;
        }
    } while (needRetry);

    return ret;
}

From source file: org.apache.hadoop.hive.ql.MultiDriver.java

private int multipreoptimizetest() throws CommandNeedRetryException {
    int i;
    PerfLogger perfLogger = PerfLogger.getPerfLogger();

    for (i = 0; i < cmds.size(); i++) {
        TaskFactory.resetId();
        ParseContext pCtx = multiPctx.get(i);
        //  conf=(HiveConf)confs.get(i);
        conf = pCtx.getConf();
        ctx = pCtx.getContext();

        LOG.info("Before  MultidoPhase2forTest Optree:\n" + Operator.toString(pCtx.getTopOps().values()));
        // do Optimizer  gen MR task
        SemanticAnalyzer sem;
        try {
            sem = new SemanticAnalyzer(conf);
            sem.MultidoPhase2forTest(pCtx);
            sem.validate();

            plan = new QueryPlan(cmds.get(i), sem, perfLogger.getStartTime(PerfLogger.DRIVER_RUN));

            if (false) {
                String queryPlanFileName = ctx.getLocalScratchDir(true) + Path.SEPARATOR_CHAR + "queryplan.xml";
                LOG.info("query plan = " + queryPlanFileName);
                queryPlanFileName = new Path(queryPlanFileName).toUri().getPath();

                // serialize the queryPlan
                FileOutputStream fos = new FileOutputStream(queryPlanFileName);
                Utilities.serializeObject(plan, fos);
                fos.close();
            }

            // initialize FetchTask right here
            if (plan.getFetchTask() != null) {
                plan.getFetchTask().initialize(conf, plan, null);
            }

            // get the output schema
            schema = schemas.get(i);

        } catch (Exception e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }

        boolean requireLock = false;
        boolean ckLock = checkLockManager();

        if (ckLock) {
            boolean lockOnlyMapred = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_LOCK_MAPRED_ONLY);
            if (lockOnlyMapred) {
                Queue<Task<? extends Serializable>> taskQueue = new LinkedList<Task<? extends Serializable>>();
                taskQueue.addAll(plan.getRootTasks());
                while (taskQueue.peek() != null) {
                    Task<? extends Serializable> tsk = taskQueue.remove();
                    requireLock = requireLock || tsk.requireLock();
                    if (requireLock) {
                        break;
                    }
                    if (tsk instanceof ConditionalTask) {
                        taskQueue.addAll(((ConditionalTask) tsk).getListTasks());
                    }
                    if (tsk.getChildTasks() != null) {
                        taskQueue.addAll(tsk.getChildTasks());
                    }
                    // does not add back up task here, because back up task should be the same
                    // type of the original task.
                }
            } else {
                requireLock = true;
            }
        }
        int ret;
        if (requireLock) {
            ret = acquireReadWriteLocks();
            if (ret != 0) {
                releaseLocks(ctx.getHiveLocks());
                //  return new CommandProcessorResponse(ret, errorMessage, SQLState);
            }
        }

        ret = execute();
        if (ret != 0) {
            //if needRequireLock is false, the release here will do nothing because there is no lock
            releaseLocks(ctx.getHiveLocks());
            //  return new CommandProcessorResponse(ret, errorMessage, SQLState);
        }

        //if needRequireLock is false, the release here will do nothing because there is no lock
        releaseLocks(ctx.getHiveLocks());

        //test output
        SessionState ss = SessionState.get();
        PrintStream out = ss.out;
        ArrayList<String> res = new ArrayList<String>();
        LOG.info("Output the result of query ID(" + i + "):");
        printHeader(this, out);
        int counter = 0;
        try {
            while (this.getResults(res)) {
                for (String r : res) {
                    out.println(r);
                }
                counter += res.size();
                res.clear();
                if (out.checkError()) {
                    break;
                }
            }
        } catch (IOException e) {
            console.printError("Failed with exception " + e.getClass().getName() + ":" + e.getMessage(),
                    "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
            ret = 1;
        }

    }

    return 0;
}

From source file: org.apache.hadoop.hive.ql.MultiDriver.java

private void multiOutputexplain() throws CommandNeedRetryException {

    //test output
    //output the result of the multiquery
    // set the var:plan schema ctx
    SessionState ss = SessionState.get();
    PrintStream out = ss.out;
    ArrayList<String> res = new ArrayList<String>();
    LOG.info("Output the result of Multi-query");
    try {

        boolean isexplain = ctx.getExplain();
        if (true) {
            LOG.info("Output explain query plan:");
            printHeader(this, out);
            int counter = 0;

            while (this.getResults(res)) {
                for (String r : res) {
                    out.println(r);
                }
                counter += res.size();
                res.clear();
                if (out.checkError()) {
                    break;
                }
            }

            return;
        }
        int i;
        for (i = 0; i < multiPctx.size(); i++) {

            LOG.info("Output Multi-query ID:" + i);
            int counter = 0;
            if (!isexplain) {
                schema = schemas.get(i);
            } else {
                //schema=null;
            }
            printHeader(this, out);

            if (plan != null && ((MultiQueryPlan) plan).getfetchtasklist() != null) {
                plan.setFetchTask(((MultiQueryPlan) plan).getfetchtasklist().get(i));
            }
            this.ctx = multiPctx.get(i).getContext();

            while (this.getResults(res)) {
                for (String r : res) {
                    out.println(r);
                }
                counter += res.size();
                res.clear();
                if (out.checkError()) {
                    break;
                }
            }
        }
    } catch (IOException e) {
        console.printError("Failed with exception " + e.getClass().getName() + ":" + e.getMessage(),
                "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
        // ret = 1;
    }

}

From source file: org.apache.hadoop.hive.ql.MultiDriver.java

private void multiOutputResult() throws CommandNeedRetryException {

    //test output
    //output the result of the multiquery
    // set the var:plan schema ctx
    SessionState ss = SessionState.get();
    PrintStream out = ss.out;
    ArrayList<String> res = new ArrayList<String>();
    LOG.info("Output the result of Multi-query");
    try {

        boolean isexplain = ctx.getExplain();
        if (isexplain) {
            LOG.info("Output explain query plan:");
            printHeader(this, out);
            int counter = 0;

            while (this.getResults(res)) {
                for (String r : res) {
                    out.println(r);
                }
                counter += res.size();
                res.clear();
                if (out.checkError()) {
                    break;
                }
            }

            return;
        }
        int i;
        for (i = 0; i < multiPctx.size(); i++) {

            System.out.println("Output Multi-query ID:" + i);
            int counter = 0;
            if (!isexplain) {
                schema = schemas.get(i);
            } else {
                //schema=null;
            }
            printHeader(this, out);

            if (plan != null && ((MultiQueryPlan) plan).getfetchtasklist() != null) {
                plan.setFetchTask(((MultiQueryPlan) plan).getfetchtasklist().get(i));
            }
            this.ctx = multiPctx.get(i).getContext();

            while (this.getResults(res)) {
                for (String r : res) {
                    out.println(r);
                }
                counter += res.size();
                res.clear();
                if (out.checkError()) {
                    break;
                }
            }
        }
    } catch (IOException e) {
        console.printError("Failed with exception " + e.getClass().getName() + ":" + e.getMessage(),
                "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
        // ret = 1;
    }

}

From source file: org.nuclos.server.masterdata.ejb3.MasterDataFacadeBean.java

/**
 * Validate all masterdata entries against their meta information (length,
 * format, min, max etc.). The transaction type is "not supported" here in
 * order to avoid a transaction timeout, as the whole operation may take some
 * time.
 *
 * @param sOutputFileName the name of the csv file to which the results are
 *           written.
 */
@Transactional(propagation = Propagation.NOT_SUPPORTED, noRollbackFor = { Exception.class })
@RolesAllowed("UseManagementConsole")
public void checkMasterDataValues(String sOutputFileName) {
    final PrintStream ps;
    try {
        ps = new PrintStream(new BufferedOutputStream(new FileOutputStream(sOutputFileName)), true);
    } catch (FileNotFoundException ex) {
        throw new NuclosFatalException(
                StringUtils.getParameterizedExceptionMessage("masterdata.error.missing.file", sOutputFileName),
                ex);
    }

    ps.println("Entit\u00e4t; ID; Fehlermeldung");
    for (MasterDataMetaVO mdmcvo : MasterDataMetaCache.getInstance().getAllMetaData()) {
        final String sEntityName = mdmcvo.getEntityName();
        try {
            for (MasterDataVO mdvo : helper.getGenericMasterData(sEntityName, null, true)) {
                try {
                    // validate each record
                    mdvo.validate(mdmcvo);
                } catch (CommonValidationException ex) {
                    final StringBuilder sbResult = new StringBuilder();
                    sbResult.append(sEntityName);
                    sbResult.append(";");
                    sbResult.append(mdvo.getId());
                    sbResult.append(";");
                    sbResult.append(ex.getMessage());
                    ps.println(sbResult.toString());
                }
            }
        } catch (Exception e) {
            LOG.error("checkMasterDataValues failed: " + e, e);
            error("Error while validating entity " + sEntityName);
        }
    }
    if (ps != null) {
        ps.close();
    }
    if (ps != null && ps.checkError()) {
        throw new NuclosFatalException("Failed to close PrintStream.");
    }
}