Example usage for java.lang IndexOutOfBoundsException printStackTrace

List of usage examples for java.lang IndexOutOfBoundsException printStackTrace

Introduction

On this page you can find example usage of java.lang.IndexOutOfBoundsException.printStackTrace.

Prototype

public void printStackTrace(PrintStream s) 

Document

Prints this throwable and its backtrace to the specified print stream.
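
Before the source-file examples below, a minimal self-contained sketch of the call itself: catching an IndexOutOfBoundsException and printing its backtrace to an explicit PrintStream (System.err here). The class name and the sample list are illustrative only.

import java.util.Arrays;
import java.util.List;

public class PrintStackTraceDemo {
    public static void main(String[] args) {
        List<String> items = Arrays.asList("a", "b");
        try {
            items.get(5); // index 5 is out of range for a 2-element list
        } catch (IndexOutOfBoundsException e) {
            // print the throwable and its backtrace to the chosen PrintStream
            e.printStackTrace(System.err);
        }
    }
}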

Usage

From source file: org.apache.hadoop.hive.ql.io.rcfile.stats.PartialScanTask.java

public static void main(String[] args) {
    String inputPathStr = null;
    String outputDir = null;
    String jobConfFileName = null;

    try {
        for (int i = 0; i < args.length; i++) {
            if (args[i].equals("-input")) {
                inputPathStr = args[++i];
            } else if (args[i].equals("-jobconffile")) {
                jobConfFileName = args[++i];
            } else if (args[i].equals("-outputDir")) {
                outputDir = args[++i];
            }
        }
    } catch (IndexOutOfBoundsException e) {
        System.err.println("Missing argument to option");
        printUsage();
    }

    if (inputPathStr == null || outputDir == null || outputDir.trim().equals("")) {
        printUsage();
    }

    List<Path> inputPaths = new ArrayList<Path>();
    String[] paths = inputPathStr.split(INPUT_SEPERATOR);
    if (paths == null || paths.length == 0) {
        printUsage();
    }

    FileSystem fs = null;
    JobConf conf = new JobConf(PartialScanTask.class);
    for (String path : paths) {
        try {
            Path pathObj = new Path(path);
            if (fs == null) {
                fs = FileSystem.get(pathObj.toUri(), conf);
            }
            FileStatus fstatus = fs.getFileStatus(pathObj);
            if (fstatus.isDir()) {
                FileStatus[] fileStatus = fs.listStatus(pathObj);
                for (FileStatus st : fileStatus) {
                    inputPaths.add(st.getPath());
                }
            } else {
                inputPaths.add(fstatus.getPath());
            }
        } catch (IOException e) {
            e.printStackTrace(System.err);
        }
    }

    if (jobConfFileName != null) {
        conf.addResource(new Path(jobConfFileName));
    }

    org.slf4j.Logger LOG = LoggerFactory.getLogger(PartialScanTask.class.getName());
    boolean isSilent = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESESSIONSILENT);
    LogHelper console = new LogHelper(LOG, isSilent);

    // print out the location of the log file for the user so
    // that it's easy to find reason for local mode execution failures
    for (Appender appender : ((Logger) LogManager.getRootLogger()).getAppenders().values()) {
        if (appender instanceof FileAppender) {
            console.printInfo("Execution log at: " + ((FileAppender) appender).getFileName());
        } else if (appender instanceof RollingFileAppender) {
            console.printInfo("Execution log at: " + ((RollingFileAppender) appender).getFileName());
        }
    }

    QueryState queryState = new QueryState.Builder().withHiveConf(new HiveConf(conf, PartialScanTask.class))
            .build();
    PartialScanWork mergeWork = new PartialScanWork(inputPaths);
    DriverContext driverCxt = new DriverContext();
    PartialScanTask taskExec = new PartialScanTask();
    taskExec.initialize(queryState, null, driverCxt, new CompilationOpContext());
    taskExec.setWork(mergeWork);
    int ret = taskExec.execute(driverCxt);

    if (ret != 0) {
        System.exit(2);
    }

}
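
The PartialScanTask example above, and the two BlockMergeTask examples that follow, share the same pattern: each option value is read with args[++i], so a trailing option with no value raises an ArrayIndexOutOfBoundsException (a subclass of IndexOutOfBoundsException) that is caught once around the whole parsing loop. A stripped-down sketch of that pattern, with hypothetical option names:

public class ArgParseSketch {
    public static void main(String[] args) {
        String input = null;
        String output = null;
        try {
            for (int i = 0; i < args.length; i++) {
                if (args[i].equals("-input")) {
                    input = args[++i];   // throws if "-input" is the last token
                } else if (args[i].equals("-output")) {
                    output = args[++i];
                }
            }
        } catch (IndexOutOfBoundsException e) {
            // the trailing option had no value; report and bail out
            System.err.println("Missing argument to option");
            e.printStackTrace(System.err);
            System.exit(1);
        }
        System.out.println("input=" + input + ", output=" + output);
    }
}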

From source file: org.apache.hadoop.hive.ql.io.merge.BlockMergeTask.java

public static void main(String[] args) {
    String inputPathStr = null;
    String outputDir = null;
    String jobConfFileName = null;
    Class<? extends Mapper> mapperClass = RCFileMergeMapper.class;
    Class<? extends FileInputFormat> inputFormatClass = RCFileBlockMergeInputFormat.class;

    try {
        for (int i = 0; i < args.length; i++) {
            if (args[i].equals("-input")) {
                inputPathStr = args[++i];
            } else if (args[i].equals("-jobconffile")) {
                jobConfFileName = args[++i];
            } else if (args[i].equals("-outputDir")) {
                outputDir = args[++i];
            } else if (args[i].equals("-inputformat")) {
                String inputFormat = args[++i];
                if (inputFormat.equalsIgnoreCase("ORC")) {
                    mapperClass = OrcMergeMapper.class;
                    inputFormatClass = OrcBlockMergeInputFormat.class;
                } else if (!inputFormat.equalsIgnoreCase("RCFile")) {
                    System.err.println("Only RCFile and OrcFile inputs are supported.");
                    printUsage();
                }
            }
        }
    } catch (IndexOutOfBoundsException e) {
        System.err.println("Missing argument to option");
        printUsage();
    }

    if (inputPathStr == null || outputDir == null || outputDir.trim().equals("")) {
        printUsage();
    }

    List<String> inputPaths = new ArrayList<String>();
    String[] paths = inputPathStr.split(INPUT_SEPERATOR);
    if (paths == null || paths.length == 0) {
        printUsage();
    }

    FileSystem fs = null;
    JobConf conf = new JobConf(BlockMergeTask.class);
    for (String path : paths) {
        try {
            Path pathObj = new Path(path);
            if (fs == null) {
                fs = FileSystem.get(pathObj.toUri(), conf);
            }
            FileStatus fstatus = fs.getFileStatus(pathObj);
            if (fstatus.isDir()) {
                FileStatus[] fileStatus = fs.listStatus(pathObj);
                for (FileStatus st : fileStatus) {
                    inputPaths.add(st.getPath().toString());
                }
            } else {
                inputPaths.add(fstatus.getPath().toString());
            }
        } catch (IOException e) {
            e.printStackTrace(System.err);
        }
    }

    if (jobConfFileName != null) {
        conf.addResource(new Path(jobConfFileName));
    }
    HiveConf hiveConf = new HiveConf(conf, BlockMergeTask.class);

    Log LOG = LogFactory.getLog(BlockMergeTask.class.getName());
    boolean isSilent = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESESSIONSILENT);
    LogHelper console = new LogHelper(LOG, isSilent);

    // print out the location of the log file for the user so
    // that it's easy to find reason for local mode execution failures
    for (Appender appender : Collections
            .list((Enumeration<Appender>) LogManager.getRootLogger().getAllAppenders())) {
        if (appender instanceof FileAppender) {
            console.printInfo("Execution log at: " + ((FileAppender) appender).getFile());
        }
    }

    MergeWork mergeWork = new MergeWork(inputPaths, outputDir, mapperClass, inputFormatClass);
    DriverContext driverCxt = new DriverContext();
    BlockMergeTask taskExec = new BlockMergeTask();
    taskExec.initialize(hiveConf, null, driverCxt);
    taskExec.setWork(mergeWork);
    int ret = taskExec.execute(driverCxt);

    if (ret != 0) {
        System.exit(2);
    }

}

From source file: org.apache.hadoop.hive.ql.io.rcfile.merge.BlockMergeTask.java

public static void main(String[] args) {

    ArrayList<String> jobConfArgs = new ArrayList<String>();

    String inputPathStr = null;
    String outputDir = null;

    try {
        for (int i = 0; i < args.length; i++) {
            if (args[i].equals("-input")) {
                inputPathStr = args[++i];
            } else if (args[i].equals("-jobconf")) {
                jobConfArgs.add(args[++i]);
            } else if (args[i].equals("-outputDir")) {
                outputDir = args[++i];
            }
        }
    } catch (IndexOutOfBoundsException e) {
        System.err.println("Missing argument to option");
        printUsage();
    }

    if (inputPathStr == null || outputDir == null || outputDir.trim().equals("")) {
        printUsage();
    }

    List<String> inputPaths = new ArrayList<String>();
    String[] paths = inputPathStr.split(INPUT_SEPERATOR);
    if (paths == null || paths.length == 0) {
        printUsage();
    }

    FileSystem fs = null;
    JobConf conf = new JobConf(BlockMergeTask.class);
    for (String path : paths) {
        try {
            Path pathObj = new Path(path);
            if (fs == null) {
                fs = FileSystem.get(pathObj.toUri(), conf);
            }
            FileStatus fstatus = fs.getFileStatus(pathObj);
            if (fstatus.isDir()) {
                FileStatus[] fileStatus = fs.listStatus(pathObj);
                for (FileStatus st : fileStatus) {
                    inputPaths.add(st.getPath().toString());
                }
            } else {
                inputPaths.add(fstatus.getPath().toString());
            }
        } catch (IOException e) {
            e.printStackTrace(System.err);
        }
    }

    StringBuilder sb = new StringBuilder("JobConf:\n");

    for (String one : jobConfArgs) {
        int eqIndex = one.indexOf('=');
        if (eqIndex != -1) {
            try {
                String key = one.substring(0, eqIndex);
                String value = URLDecoder.decode(one.substring(eqIndex + 1), "UTF-8");
                conf.set(key, value);
                sb.append(key).append("=").append(value).append("\n");
            } catch (UnsupportedEncodingException e) {
                System.err.println(
                        "Unexpected error " + e.getMessage() + " while encoding " + one.substring(eqIndex + 1));
                System.exit(3);
            }
        }
    }
    HiveConf hiveConf = new HiveConf(conf, BlockMergeTask.class);

    Log LOG = LogFactory.getLog(BlockMergeTask.class.getName());
    boolean isSilent = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESESSIONSILENT);
    LogHelper console = new LogHelper(LOG, isSilent);

    // print out the location of the log file for the user so
    // that it's easy to find reason for local mode execution failures
    for (Appender appender : Collections
            .list((Enumeration<Appender>) LogManager.getRootLogger().getAllAppenders())) {
        if (appender instanceof FileAppender) {
            console.printInfo("Execution log at: " + ((FileAppender) appender).getFile());
        }
    }

    // log the list of job conf parameters for reference
    LOG.info(sb.toString());

    MergeWork mergeWork = new MergeWork(inputPaths, outputDir);
    DriverContext driverCxt = new DriverContext();
    BlockMergeTask taskExec = new BlockMergeTask();
    taskExec.initialize(hiveConf, null, driverCxt);
    taskExec.setWork(mergeWork);
    int ret = taskExec.execute(driverCxt);

    if (ret != 0) {
        System.exit(2);
    }

}

From source file: org.intermine.web.logic.session.SessionMethods.java

/**
 * Executes an action and calls a callback when it completes successfully. If the
 * query fails for some reason, this method returns false and ActionErrors are set on the
 * request.
 *
 * @param session   the http session
 * @param resources message resources
 * @param qid       the query id
 * @param action    the action/query to perform in a new thread
 * @param completionCallBack sets the method to call when the action successfully completes
 * @return  true if query ran successfully, false if an error occurred
 * @throws  Exception if getting results info from paged results fails
 */
public static boolean runQuery(final HttpSession session, final MessageResources resources, final String qid,
        final Action action, final CompletionCallBack completionCallBack) throws Exception {
    final InterMineAPI im = getInterMineAPI(session);
    final ObjectStore os = im.getObjectStore();

    final ObjectStoreInterMineImpl ios;
    if (os instanceof ObjectStoreInterMineImpl) {
        ios = (ObjectStoreInterMineImpl) os;
    } else {
        ios = null;
    }
    Map<String, QueryMonitor> queries = getRunningQueries(session);
    QueryMonitor monitor = queries.get(qid);
    // A reference to this runnable is used as a token for registering
    // and cancelling the running query

    RunQueryThread runnable = new RunQueryThread() {
        @Override
        public void run() {
            try {

                // Register request id for query on this thread
                // We do this before setting r
                if (ios != null) {
                    LOG.debug("Registering request id " + this);
                    ios.registerRequestId(this);
                }

                // call this so that if an exception occurs we notice now rather than in the
                // JSP code
                try {
                    action.process();
                } catch (IndexOutOfBoundsException err) {
                    // no results - ignore
                    // we don't call size() first to avoid this exception because that could be
                    // very slow on a large results set
                } catch (RuntimeException e) {
                    if (e.getCause() instanceof ObjectStoreException) {
                        throw (ObjectStoreException) e.getCause();
                    }
                    throw e;
                }
            } catch (ObjectStoreException e) {
                // put stack trace in the log
                LOG.error("Exception", e);

                String key = (e instanceof ObjectStoreQueryDurationException)
                        ? "errors.query.estimatetimetoolong"
                        : "errors.query.objectstoreerror";
                recordError(resources.getMessage(key), session);

                error = true;
            } catch (Throwable err) {
                StringWriter sw = new StringWriter();
                err.printStackTrace(new PrintWriter(sw));
                recordError(sw.toString(), session);
                LOG.error("Exception", err);
                error = true;
            } finally {
                try {
                    LOG.debug("Deregistering request id " + this);
                    ((ObjectStoreInterMineImpl) os).deregisterRequestId(this);
                } catch (ObjectStoreException e1) {
                    LOG.error("Exception", e1);
                    error = true;
                }
            }
        }
    };
    Thread thread = null;
    thread = new Thread(runnable);
    thread.start();

    while (thread.isAlive()) {
        Thread.sleep(1000);
        if (monitor != null) {
            boolean cancelled = monitor.shouldCancelQuery();
            if (cancelled && ios != null) {
                LOG.debug("Cancelling request " + runnable);
                ios.cancelRequest(runnable);
                monitor.queryCancelled();
                return false;
            }
        }
    }

    if (runnable.isError()) {
        if (monitor != null) {
            monitor.queryCancelledWithError();
        }
        return false;
    }

    if (completionCallBack != null) {
        completionCallBack.complete();
    }

    if (monitor != null) {
        monitor.queryCompleted();
    }

    return true;
}
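
The runQuery example above uses the sibling PrintWriter overload to capture a stack trace into a String before recording it against the session. A minimal sketch of that capture idiom (class and method names are illustrative):

import java.io.PrintWriter;
import java.io.StringWriter;

public class StackTraceToString {
    // Render a throwable's backtrace as a String, as the runQuery error handler does.
    static String stackTraceOf(Throwable t) {
        StringWriter sw = new StringWriter();
        t.printStackTrace(new PrintWriter(sw));
        return sw.toString();
    }

    public static void main(String[] args) {
        try {
            "abc".charAt(10); // StringIndexOutOfBoundsException, a subclass of IndexOutOfBoundsException
        } catch (IndexOutOfBoundsException e) {
            System.out.println(stackTraceOf(e));
        }
    }
}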