Example usage for org.apache.hadoop.util StringUtils stringifyException

Introduction

On this page you can find example usage for org.apache.hadoop.util StringUtils stringifyException.

Prototype

public static String stringifyException(Throwable e) 

Document

Make a string representation of the exception.
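
Before the project-specific examples below, here is a minimal, self-contained sketch of the typical pattern: catch an exception and pass it to stringifyException so the message and full stack trace end up in a single log string. The class name, logger setup, and file path are illustrative assumptions, not taken from any of the projects below.

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.util.StringUtils;

public class StringifyExceptionExample {
    private static final Log LOG = LogFactory.getLog(StringifyExceptionExample.class);

    public static void main(String[] args) {
        // The file name is hypothetical; opening it fails with an IOException.
        try (InputStream in = Files.newInputStream(Paths.get("no-such-file.txt"))) {
            in.read();
        } catch (IOException e) {
            // stringifyException returns the exception's string form followed by
            // its stack trace, so one LOG call captures what printStackTrace would.
            LOG.warn(StringUtils.stringifyException(e));
        }
    }
}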

Usage

From source file:com.elex.ssp.DomainSuffixes.java

License:Apache License

/** private ctor */
private DomainSuffixes() {
    String file = "domain-suffixes.xml";
    InputStream input = this.getClass().getClassLoader().getResourceAsStream(file);
    try {
        new DomainSuffixesReader().read(this, input);
    } catch (Exception ex) {
        LOG.warn(StringUtils.stringifyException(ex));
    }
}

From source file:com.elex.ssp.DomainSuffixesReader.java

License:Apache License

void read(DomainSuffixes tldEntries, InputStream input) throws IOException {
    try {

        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        factory.setIgnoringComments(true);
        DocumentBuilder builder = factory.newDocumentBuilder();
        Document document = builder.parse(new InputSource(input));

        Element root = document.getDocumentElement();

        if (root != null && root.getTagName().equals("domains")) {

            Element tlds = (Element) root.getElementsByTagName("tlds").item(0);
            Element suffixes = (Element) root.getElementsByTagName("suffixes").item(0);

            //read tlds
            readITLDs(tldEntries, (Element) tlds.getElementsByTagName("itlds").item(0));
            readGTLDs(tldEntries, (Element) tlds.getElementsByTagName("gtlds").item(0));
            readCCTLDs(tldEntries, (Element) tlds.getElementsByTagName("cctlds").item(0));

            readSuffixes(tldEntries, suffixes);
        } else {
            throw new IOException("xml file is not valid");
        }
    } catch (ParserConfigurationException ex) {
        LOG.warn(StringUtils.stringifyException(ex));
        throw new IOException(ex.getMessage());
    } catch (SAXException ex) {
        LOG.warn(StringUtils.stringifyException(ex));
        throw new IOException(ex.getMessage());
    }
}

From source file:com.github.dryangkun.hbase.tidx.hive.HBaseStorageHandler.java

License:Apache License

private HBaseAdmin getHBaseAdmin() throws MetaException {
    try {
        if (admin == null) {
            admin = new HBaseAdmin(hbaseConf);
        }
        return admin;
    } catch (IOException ioe) {
        throw new MetaException(StringUtils.stringifyException(ioe));
    }
}

From source file:com.github.dryangkun.hbase.tidx.hive.HBaseStorageHandler.java

License:Apache License

@Override
public void commitDropTable(Table tbl, boolean deleteData) throws MetaException {

    try {
        String tableName = getHBaseTableName(tbl);
        boolean isExternal = MetaStoreUtils.isExternalTable(tbl);
        if (deleteData && !isExternal) {
            if (getHBaseAdmin().isTableEnabled(tableName)) {
                getHBaseAdmin().disableTable(tableName);
            }
            getHBaseAdmin().deleteTable(tableName);
        }
    } catch (IOException ie) {
        throw new MetaException(StringUtils.stringifyException(ie));
    }
}

From source file:com.github.dryangkun.hbase.tidx.hive.HBaseStorageHandler.java

License:Apache License

@Override
public void preCreateTable(Table tbl) throws MetaException {
    boolean isExternal = MetaStoreUtils.isExternalTable(tbl);

    // We'd like to move this to HiveMetaStore for any non-native table, but
    // first we need to support storing NULL for location on a table
    if (tbl.getSd().getLocation() != null) {
        throw new MetaException("LOCATION may not be specified for HBase.");
    }

    HTable htable = null;

    try {
        String tableName = getHBaseTableName(tbl);
        Map<String, String> serdeParam = tbl.getSd().getSerdeInfo().getParameters();
        String hbaseColumnsMapping = serdeParam.get(HBaseSerDe.HBASE_COLUMNS_MAPPING);

        ColumnMappings columnMappings = HBaseSerDe.parseColumnsMapping(hbaseColumnsMapping);

        HTableDescriptor tableDesc;

        if (!getHBaseAdmin().tableExists(tableName)) {
            // if it is not an external table then create one
            if (!isExternal) {
                // Create the column descriptors
                tableDesc = new HTableDescriptor(tableName);
                Set<String> uniqueColumnFamilies = new HashSet<String>();

                for (ColumnMapping colMap : columnMappings) {
                    if (!colMap.hbaseRowKey && !colMap.hbaseTimestamp) {
                        uniqueColumnFamilies.add(colMap.familyName);
                    }
                }

                for (String columnFamily : uniqueColumnFamilies) {
                    tableDesc.addFamily(new HColumnDescriptor(Bytes.toBytes(columnFamily)));
                }

                getHBaseAdmin().createTable(tableDesc);
            } else {
                // an external table
                throw new MetaException("HBase table " + tableName
                        + " doesn't exist while the table is declared as an external table.");
            }

        } else {
            if (!isExternal) {
                throw new MetaException("Table " + tableName + " already exists"
                        + " within HBase; use CREATE EXTERNAL TABLE instead to" + " register it in Hive.");
            }
            // make sure the schema mapping is right
            tableDesc = getHBaseAdmin().getTableDescriptor(Bytes.toBytes(tableName));

            for (ColumnMapping colMap : columnMappings) {

                if (colMap.hbaseRowKey || colMap.hbaseTimestamp) {
                    continue;
                }

                if (!tableDesc.hasFamily(colMap.familyNameBytes)) {
                    throw new MetaException("Column Family " + colMap.familyName
                            + " is not defined in hbase table " + tableName);
                }
            }
        }

        // ensure the table is online
        htable = new HTable(hbaseConf, tableDesc.getName());
    } catch (Exception se) {
        throw new MetaException(StringUtils.stringifyException(se));
    } finally {
        if (htable != null) {
            IOUtils.closeQuietly(htable);
        }
    }
}

From source file:com.github.dryangkun.hbase.tidx.hive.HBaseStorageHandler.java

License:Apache License

@Override
public void rollbackCreateTable(Table table) throws MetaException {
    boolean isExternal = MetaStoreUtils.isExternalTable(table);
    String tableName = getHBaseTableName(table);
    try {
        if (!isExternal && getHBaseAdmin().tableExists(tableName)) {
            // we have created an HBase table, so we delete it to roll back;
            if (getHBaseAdmin().isTableEnabled(tableName)) {
                getHBaseAdmin().disableTable(tableName);
            }
            getHBaseAdmin().deleteTable(tableName);
        }
    } catch (IOException ie) {
        throw new MetaException(StringUtils.stringifyException(ie));
    }
}

From source file:com.github.gaoyangthu.demo.mapred.DBCountPageView.java

License:Apache License

private void shutdown() {
    try {
        connection.commit();
        connection.close();
    } catch (Throwable ex) {
        LOG.warn("Exception occurred while closing connection :" + StringUtils.stringifyException(ex));
    } finally {
        try {
            if (server != null) {
                server.shutdown();
            }
        } catch (Throwable ex) {
            LOG.warn("Exception occurred while shutting down HSQLDB :" + StringUtils.stringifyException(ex));
        }
    }
}

From source file:com.hortonworks.hbase.replication.bridge.HBaseServer.java

License:Apache License

/**
 * Setup response for the IPC Call.
 *
 * @param response buffer to serialize the response into
 * @param call {@link Call} to which we are setting up the response
 * @param status {@link Status} of the IPC call
 * @param rv return value for the IPC Call, if the call was successful
 * @param errorClass error class, if the call failed
 * @param error error message, if the call failed
 * @throws IOException
 */
private void setupResponse(ByteArrayOutputStream response, Call call, Status status, Writable rv,
        String errorClass, String error) throws IOException {
    response.reset();
    DataOutputStream out = new DataOutputStream(response);

    if (status == Status.SUCCESS) {
        try {
            rv.write(out);
            call.setResponse(rv, status, null, null);
        } catch (Throwable t) {
            LOG.warn("Error serializing call response for call " + call, t);
            // Call back to same function - this is OK since the
            // buffer is reset at the top, and since status is changed
            // to ERROR it won't infinite loop.
            call.setResponse(null, Status.ERROR, t.getClass().getName(), StringUtils.stringifyException(t));
        }
    } else {
        call.setResponse(rv, status, errorClass, error);
    }
}

From source file:com.iflytek.spider.crawl.CrawlDb.java

License:Apache License

public int run(String[] args) throws Exception {
    if (args.length < 2) {
        System.err.println(
                "Usage: CrawlDb <crawldb> (-dir <segments> | <seg1> <seg2> ...) [-force] [-noAdditions]");
        System.err.println("\tcrawldb\tCrawlDb to update");
        System.err.println("\t-dir segments\tparent directory containing all segments to update from");
        System.err.println("\tseg1 seg2 ...\tlist of segment names to update from");
        System.err.println("\t-force\tforce update even if CrawlDb appears to be locked (CAUTION advised)");
        System.err.println(
                "\t-noAdditions\tonly update already existing URLs, don't add any newly discovered URLs");
        return -1;
    }
    boolean force = false;
    final FileSystem fs = FileSystem.get(getConf());
    boolean additionsAllowed = getConf().getBoolean(CRAWLDB_ADDITIONS_ALLOWED, true);
    HashSet<Path> dirs = new HashSet<Path>();
    for (int i = 1; i < args.length; i++) {
        if (args[i].equals("-force")) {
            force = true;
        } else if (args[i].equals("-noAdditions")) {
            additionsAllowed = false;
        } else if (args[i].equals("-dir")) {
            FileStatus[] paths = fs.listStatus(new Path(args[++i]), HadoopFSUtil.getPassDirectoriesFilter(fs));
            dirs.addAll(Arrays.asList(HadoopFSUtil.getPaths(paths)));
        } else {
            dirs.add(new Path(args[i]));
        }
    }
    try {
        update(new Path(args[0]), dirs.toArray(new Path[dirs.size()]), additionsAllowed, force);
        return 0;
    } catch (Exception e) {
        LOG.fatal("CrawlDb update: " + StringUtils.stringifyException(e));
        return -1;
    }
}

From source file:com.iflytek.spider.crawl.GeneratorSmart.java

License:Apache License

public int run(String[] args) throws Exception {
    if (args.length < 2) {
        System.out.println("Usage: Generator <crawldb> <segments_dir> [-force] [-numFetchers numFetchers]");
        return -1;
    }

    Path dbDir = new Path(args[0]);
    Path segmentsDir = new Path(args[1]);
    long curTime = System.currentTimeMillis();
    int numFetchers = -1;
    boolean force = false;

    for (int i = 2; i < args.length; i++) {
        if ("-numFetchers".equals(args[i])) {
            numFetchers = Integer.parseInt(args[i + 1]);
            i++;
        } else if ("-force".equals(args[i])) {
            force = true;
        }

    }

    try {
        Path[] segs = generate(dbDir, segmentsDir, numFetchers, curTime, force);
        if (segs == null)
            return -1;
    } catch (Exception e) {
        LOG.fatal("Generator: " + StringUtils.stringifyException(e));
        return -1;
    }
    return 0;
}