Example usage for org.apache.hadoop.util StringUtils stringifyException

List of usage examples for org.apache.hadoop.util StringUtils stringifyException

Introduction

On this page you can find example usages of org.apache.hadoop.util StringUtils stringifyException.

Prototype

public static String stringifyException(Throwable e) 

Document

Make a string representation of the exception.
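
Before the project examples below, here is a minimal, self-contained sketch of a typical call. The example class and the doWork() helper are illustrative assumptions; only StringUtils.stringifyException(Throwable) itself comes from Hadoop.

import org.apache.hadoop.util.StringUtils;

public class StringifyExceptionExample {

    public static void main(String[] args) {
        try {
            doWork();
        } catch (Exception e) {
            // stringifyException returns the full stack trace as a single String,
            // which is convenient for loggers and error messages.
            System.err.println("Error: " + StringUtils.stringifyException(e));
        }
    }

    // Hypothetical helper that simulates a failing operation.
    private static void doWork() throws Exception {
        throw new Exception("simulated failure");
    }
}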

Usage

From source file: ExportStressTest.java

License: Apache License

@Override
public int run(String[] args) {
    String connectStr = args[0];
    String username = args[1];

    try {
        createData();
        createTable(connectStr, username);
        runExport(connectStr, username);
    } catch (Exception e) {
        System.err.println("Error: " + StringUtils.stringifyException(e));
        return 1;
    }

    return 0;
}

From source file: ParascaleFileStatus.java

License: Apache License

void loadPermissionInfo() {

    if (permissionLoaded.get()) {
        return;
    }

    IOException e = null;

    try {
        final StringTokenizer t = new StringTokenizer(getPermissionString());
        // expected format
        // -rw------- 1 username groupname ...

        String permission = t.nextToken();

        if (permission.length() > 10) {
            permission = permission.substring(0, 10);
        }

        setPermission(FsPermission.valueOf(permission));

        t.nextToken();

        setOwner(t.nextToken());
        setGroup(t.nextToken());

    } catch (final Shell.ExitCodeException ioe) {
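        // An exit code of 1 from the underlying shell command is treated as non-fatal:
        // fall back to the default permission, owner and group instead of failing.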
        if (ioe.getExitCode() != 1) {
            e = ioe;
        } else {
            setPermission(null);
            setOwner(null);
            setGroup(null);
        }

    } catch (final IOException ioe) {
        e = ioe;

    } finally {
        if (e != null) {
            throw new RuntimeException("Error while running command to get " + "file permissions : "
                    + StringUtils.stringifyException(e));
        }
        permissionLoaded.set(true);
    }

}

From source file: CouchbaseExportStressTest.java

License: Apache License

@Override
public int run(String[] args) {
    String connectStr = args[0];
    String username = args[1];

    try {
        createData();
        runExport(connectStr, username);
    } catch (Exception e) {
        System.err.println("Error: " + StringUtils.stringifyException(e));
        return 1;
    }

    return 0;
}

From source file: RHHBaseRecorder.java

License: Apache License

@Override
public void setConf(Configuration conf) {
    this.conf = conf;
    RHHBaseRecorder.ValueIsString = conf.get("rhipe_hbase_values_are_string") != null
            && conf.get("rhipe_hbase_values_are_string").equals("TRUE");
    RHHBaseRecorder.SingleCFQ = conf.get("rhipe.hbase.single.cfq") != null
            && conf.get("rhipe.hbase.single.cfq").equals("TRUE");
    String tableName = conf.get(INPUT_TABLE);
    try {
        setHTable(new HTable(HBaseConfiguration.create(conf), tableName));
    } catch (Exception e) {
        LOG.error(StringUtils.stringifyException(e));
    }
    Scan[] scans = null;
    if (conf.get(RHIPE_COLSPEC) != null) {
        try {
            String[] cols = conf.get(RHIPE_COLSPEC).split(",");
            ArrayList<Pair<String, String>> l = null;
            if (cols.length > 0) {
                l = new ArrayList<Pair<String, String>>(cols.length);
                for (int i = 0; i < cols.length; i++) {
                    String[] x = cols[i].split(":");
                    if (x.length == 1) {
                        l.add(new Pair<String, String>(x[0], null));
                        LOG.info("Added family: " + x[0]);
                    } else {
                        l.add(new Pair<String, String>(x[0], x[1]));
                        LOG.info("Added " + x[0] + ":" + x[1]);
                    }
                }
            }
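            // "rhipe.hbase.mozilla.cacheblocks" is split on ':'; the first field is parsed as an
            // int and the second is treated as a boolean flag (1 means true).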
            String[] x = conf.get("rhipe.hbase.mozilla.cacheblocks").split(":");
            scans = Fun.generateScans(conf.get("rhipe.hbase.rowlim.start"), conf.get("rhipe.hbase.rowlim.end"),
                    l, Integer.parseInt(x[0]), Integer.parseInt(x[1]) == 1 ? true : false);
        } catch (Exception e) {
            LOG.error("An error occurred.", e);
        }
    } else {
        //Scan[] scans = null;
        scans = new Scan[] { new Scan() };
        LOG.info("Start Row Key" + Bytes.toStringBinary(
                org.apache.commons.codec.binary.Base64.decodeBase64(conf.get("rhipe.hbase.rowlim.start"))));
        LOG.info("End Row Key" + Bytes.toStringBinary(
                org.apache.commons.codec.binary.Base64.decodeBase64(conf.get("rhipe.hbase.rowlim.end"))));
        //LOG.info("Filter in   " + Bytes.toStringBinary(org.apache.commons.codec.binary.Base64.decodeBase64(conf.get("rhipe.hbase.filter"))));
        //LOG.info("Filter out  " + conf.get("rhipe.hbase.filter"));
        String[] x = conf.get("rhipe.hbase.mozilla.cacheblocks").split(":");
        LOG.info("cache " + Integer.parseInt(x[0]) + " block " + Integer.parseInt(x[1]));
        scans = Fun.generateScansRows(conf.get("rhipe.hbase.rowlim.start"), conf.get("rhipe.hbase.rowlim.end"),
                Integer.parseInt(x[0]), Integer.parseInt(x[1]) == 1 ? true : false,
                conf.get("rhipe.hbase.filter"), Integer.parseInt(conf.get("rhipe.hbase.set.batch")));
        //scans = getAllColumnQualifier(table);            
    }
    setScans(scans);
}

From source file: alluxio.client.hadoop.DFSIOIntegrationTest.java

License: Apache License

/**
 * Runs the integration test for DFS IO.
 *
 * @param args arguments
 */
public static void main(String[] args) {
    DFSIOIntegrationTest bench = new DFSIOIntegrationTest();
    int res;
    try {
        res = ToolRunner.run(bench, args);
    } catch (Exception e) {
        System.err.print(StringUtils.stringifyException(e));
        res = -2;
    }
    if (res == -1) {
        System.err.print(USAGE);
    }
    System.exit(res);
}

From source file: alluxio.hadoop.fs.DFSIOIntegrationTest.java

License: Apache License

public static void main(String[] args) {
    DFSIOIntegrationTest bench = new DFSIOIntegrationTest();
    int res;
    try {
        res = ToolRunner.run(bench, args);
    } catch (Exception e) {
        System.err.print(StringUtils.stringifyException(e));
        res = -2;
    }
    if (res == -1) {
        System.err.print(USAGE);
    }
    System.exit(res);
}

From source file: c5db.tablet.hregionbridge.HRegionBridge.java

License: Apache License

/**
 * @param t The exception to stringify.
 * @return NameBytesPair of the exception name to the stringified version of the exception.
 */
private NameBytesPair buildException(final Throwable t) {
    return new NameBytesPair(t.getClass().getName(),
            ByteBuffer.wrap(Bytes.toBytes(StringUtils.stringifyException(t))));
}

From source file: cascading.hbase.helper.TableInputFormat.java

License: Apache License

public void configure(JobConf job) {
    //        Path[] tableNames = FileInputFormat.getInputPaths(job);
    //        String colArg = job.get(COLUMN_LIST);
    //        String[] colNames = colArg.split(" ");
    //        byte [][] m_cols = new byte[colNames.length][];
    //        for (int i = 0; i < m_cols.length; i++) {
    //            m_cols[i] = Bytes.toBytes(colNames[i]);
    //        }
    //        setInputColumns(m_cols);
    //        try {
    //            setHTable(new HTable(HBaseConfiguration.create(job), tableNames[0].getName()));
    //        } catch (Exception e) {
    //            LOG.error(StringUtils.stringifyException(e));
    //        }

    //this.conf = configuration;
    String tableName = job.get(INPUT_TABLE);
    try {
        setHTable(new HTable(new Configuration(job), tableName));
    } catch (Exception e) {
        LOG.error(StringUtils.stringifyException(e));
    }

    Scan scan = null;

    if (job.get(SCAN) != null) {
        try {
            scan = convertStringToScan(job.get(SCAN));
        } catch (IOException e) {
            LOG.error("An error occurred.", e);
        }
    } else {
        try {
            scan = new Scan();

            if (job.get(SCAN_COLUMNS) != null) {
                addColumns(scan, job.get(SCAN_COLUMNS));
            }

            if (job.get(SCAN_COLUMN_FAMILY) != null) {
                scan.addFamily(Bytes.toBytes(job.get(SCAN_COLUMN_FAMILY)));
            }

            if (job.get(SCAN_TIMESTAMP) != null) {
                scan.setTimeStamp(Long.parseLong(job.get(SCAN_TIMESTAMP)));
            }

            if (job.get(SCAN_TIMERANGE_START) != null && job.get(SCAN_TIMERANGE_END) != null) {
                scan.setTimeRange(Long.parseLong(job.get(SCAN_TIMERANGE_START)),
                        Long.parseLong(job.get(SCAN_TIMERANGE_END)));
            }

            if (job.get(SCAN_MAXVERSIONS) != null) {
                scan.setMaxVersions(Integer.parseInt(job.get(SCAN_MAXVERSIONS)));
            }

            if (job.get(SCAN_CACHEDROWS) != null) {
                scan.setCaching(Integer.parseInt(job.get(SCAN_CACHEDROWS)));
            }

            // false by default, full table scans generate too much BC churn
            scan.setCacheBlocks((job.getBoolean(SCAN_CACHEBLOCKS, false)));
        } catch (Exception e) {
            LOG.error(StringUtils.stringifyException(e));
        }
    }

    setScan(scan);
}

From source file: cascading.hbase.helper.TableRecordReaderImpl.java

License: Apache License

/**
 * @param key HStoreKey as input key.
 * @param value MapWritable as input value
 * @return true if there was more data
 * @throws IOException
 */
public boolean next(ImmutableBytesWritable key, Result value) throws IOException {
    Result result;
    try {
        result = this.scanner.next();
    } catch (DoNotRetryIOException e) {
        throw e;
    } catch (IOException e) {
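        // Other IOExceptions (for example an expired scanner lease) are recovered from by
        // restarting the scan at the last successfully processed row.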
        LOG.debug("recovered from " + StringUtils.stringifyException(e));
        if (lastSuccessfulRow == null) {
            LOG.warn("We are restarting the first next() invocation,"
                    + " if your mapper's restarted a few other times like this"
                    + " then you should consider killing this job and investigate"
                    + " why it's taking so long.");
        }
        if (lastSuccessfulRow == null) {
            restart(scan.getStartRow());
        } else {
            restart(lastSuccessfulRow);
            this.scanner.next(); // skip presumed already mapped row
        }
        result = this.scanner.next();
    }

    if (result != null && result.size() > 0) {
        key.set(result.getRow());
        lastSuccessfulRow = key.get();
        Writables.copyWritable(result, value);
        return true;
    }
    return false;
}

From source file: cn.itcast.hadoop.mr.wordcount.DBCountPageView.java

License: Apache License

private void shutdown() {
    try {
        connection.commit();
        connection.close();
    } catch (Throwable ex) {
        LOG.warn("Exception occurred while closing connection :" + StringUtils.stringifyException(ex));
    } finally {
        try {
            if (server != null) {
                server.shutdown();
            }
        } catch (Throwable ex) {
            LOG.warn("Exception occurred while shutting down HSQLDB :" + StringUtils.stringifyException(ex));
        }
    }
}