Example usage for org.apache.hadoop.mapred JobConf setClassLoader

Introduction

On this page you can find example usages of org.apache.hadoop.mapred JobConf setClassLoader.

Prototype

public void setClassLoader(ClassLoader classLoader) 

Document

Set the class loader that will be used to load the various objects.
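
The recurring pattern in the examples below is to take extra jars supplied on the command line (for example via -libjars), wrap them in a URLClassLoader whose parent is the configuration's current class loader, and install that loader on both the JobConf and the current thread. The following is a minimal sketch of that pattern, not taken from the examples themselves; the jar path is a placeholder and the FileUtils helpers used in the full examples are omitted.

import java.net.URL;
import java.net.URLClassLoader;

import org.apache.hadoop.mapred.JobConf;

public class SetClassLoaderSketch {
    public static void main(String[] args) throws Exception {
        JobConf conf = new JobConf();

        // Placeholder jar; in the examples below these URLs come from -libjars.
        URL[] libjars = new URL[] { new URL("file:///tmp/extra-lib.jar") };

        // Make classes in the extra jars visible to the job configuration.
        conf.setClassLoader(new URLClassLoader(libjars, conf.getClassLoader()));

        // Mirror the loader on the current thread so client-side code that
        // resolves classes by name also sees the extra jars.
        Thread.currentThread().setContextClassLoader(
                new URLClassLoader(libjars, Thread.currentThread().getContextClassLoader()));
    }
}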

Usage

From source file: org.mitre.ccv.mapred.CalculateKmerRevisedRelativeEntropy.java

License: Open Source License

@Override
public int run(String[] args) throws Exception {
    JobConf conf = new JobConf(getConf());
    boolean cleanLogs = false;

    // @TODO: use commons getopts
    List<String> other_args = new ArrayList<String>();
    for (int i = 0; i < args.length; ++i) {
        try {
            if ("-m".equals(args[i])) {
                conf.setNumMapTasks(Integer.parseInt(args[++i]));
            } else if ("-r".equals(args[i])) {
                conf.setNumReduceTasks(Integer.parseInt(args[++i]));
            } else if ("-c".equals(args[i])) {
                cleanLogs = true;
            } else if ("-t".equals(args[i])) {
                conf.setBoolean(TEXT_OUTPUT, true);
            } else if ("-libjars".equals(args[i])) {
                conf.set("tmpjars", FileUtils.validateFiles(args[++i], conf));

                URL[] libjars = FileUtils.getLibJars(conf);
                if (libjars != null && libjars.length > 0) {
                    // Add libjars to client/tasks classpath
                    conf.setClassLoader(new URLClassLoader(libjars, conf.getClassLoader()));
                    // Adds libjars to our classpath
                    Thread.currentThread().setContextClassLoader(
                            new URLClassLoader(libjars, Thread.currentThread().getContextClassLoader()));
                }
            } else {
                other_args.add(args[i]);
            }
        } catch (NumberFormatException except) {
            System.out.println("ERROR: Integer expected instead of " + args[i]);
            return printUsage();
        } catch (ArrayIndexOutOfBoundsException except) {
            System.out.println("ERROR: Required parameter missing from " + args[i - 1]);
            return printUsage();
        }
    }
    // Make sure there are exactly 3 parameters left.
    if (other_args.size() != 3) {
        System.out.println("ERROR: Wrong number of parameters: " + other_args.size() + " instead of 3.");
        return printUsage();
    }

    //return initJob(conf, inTable, sb.toString().trim(), new Path(other_args.get(1)));
    return initJob(conf, other_args.get(0), other_args.get(1), other_args.get(2), cleanLogs);

}

From source file: org.mitre.ccv.mapred.CompleteCompositionVectorUtils.java

License: Open Source License

@Override
public int run(String[] args) throws Exception {
    JobConf conf = new JobConf(getConf());

    ArrayList<String> other_args = new ArrayList<String>();
    for (int i = 0; i < args.length; ++i) {
        try {
            if ("-m".equals(args[i])) {
                conf.setNumMapTasks(Integer.parseInt(args[++i]));
            } else if ("-r".equals(args[i])) {
                conf.setNumReduceTasks(Integer.parseInt(args[++i]));
            } else if ("-libjars".equals(args[i])) {
                conf.set("tmpjars", FileUtils.validateFiles(args[++i], conf));

                URL[] libjars = FileUtils.getLibJars(conf);
                if (libjars != null && libjars.length > 0) {
                    // Add libjars to client/tasks classpath
                    conf.setClassLoader(new URLClassLoader(libjars, conf.getClassLoader()));
                    // Adds libjars to our classpath
                    Thread.currentThread().setContextClassLoader(
                            new URLClassLoader(libjars, Thread.currentThread().getContextClassLoader()));
                }
            } else {
                other_args.add(args[i]);
            }
        } catch (NumberFormatException except) {
            System.out.println("ERROR: Integer expected instead of " + args[i]);
            return printUsage();
        } catch (ArrayIndexOutOfBoundsException except) {
            System.out.println("ERROR: Required parameter missing from " + args[i - 1]);
            return printUsage();
        }
    }
    // Make sure there is at least one parameter (the command) left.
    if (other_args.size() < 1) {
        System.out.println("ERROR: Require ONE argument!");
        return printUsage();
    }

    String cmd = other_args.get(0);
    if (cmd.equals("featureVectors2Json")) {
        if (other_args.size() >= 7) {
            try {
                int start = Integer.parseInt(other_args.get(1));
                int end = Integer.parseInt(other_args.get(2));
                int kmers = Integer.parseInt(other_args.get(3));
                featureVectors2Json(conf, start, end, kmers, other_args.get(4), other_args.get(5),
                        other_args.get(6));
            } catch (NumberFormatException except) {
                System.err.println("Woops. Error converting number!");
                return -1;
            }
        } else {
            System.err.println("We need more arguments!");
            return -1;
        }
    } else if (cmd.equals("featureVectors2rows")) {
        int digits = 6;
        if (other_args.size() > 3) {
            try {
                digits = Integer.parseInt(other_args.get(1));
                featureVectors2RowMajorMatrix(conf, other_args.get(2), other_args.get(3), digits);
            } catch (NumberFormatException except) {
                System.err.println("Woops. Error converting number!");
                return -1;
            }
        } else {
            featureVectors2RowMajorMatrix(conf, other_args.get(1), other_args.get(2), digits);
        }
    } else {
        System.out.println("Unknown command:" + cmd);
        return -1;
    }
    return 0;
}

From source file: org.mitre.ccv.mapred.GenerateFeatureVectors.java

License: Open Source License

@Override
public int run(String[] args) throws Exception {
    JobConf conf = new JobConf(getConf());
    int cardinality = Integer.MAX_VALUE;
    boolean cleanLogs = false;
    String listInput = null;

    // @TODO: use commons getopts, org.apache.hadoop.util.GenericOptionsParser used it
    ArrayList<String> other_args = new ArrayList<String>();
    for (int i = 0; i < args.length; ++i) {
        try {
            if ("-m".equals(args[i])) {
                conf.setNumMapTasks(Integer.parseInt(args[++i]));
            } else if ("-r".equals(args[i])) {
                conf.setNumReduceTasks(Integer.parseInt(args[++i]));
            } else if ("-c".equals(args[i])) {
                cleanLogs = true;
            } else if ("-l".equals(args[i])) {
                listInput = args[++i];
            } else if ("-t".equals(args[i])) {
                cardinality = Integer.parseInt(args[++i]);
            } else if ("-libjars".equals(args[i])) {
                conf.set("tmpjars", FileUtils.validateFiles(args[++i], conf));

                URL[] libjars = FileUtils.getLibJars(conf);
                if (libjars != null && libjars.length > 0) {
                    // Add libjars to client/tasks classpath
                    conf.setClassLoader(new URLClassLoader(libjars, conf.getClassLoader()));
                    // Adds libjars to our classpath
                    Thread.currentThread().setContextClassLoader(
                            new URLClassLoader(libjars, Thread.currentThread().getContextClassLoader()));
                }
            } else {
                other_args.add(args[i]);
            }
        } catch (NumberFormatException except) {
            System.out.println("ERROR: Integer expected instead of " + args[i]);
            return printUsage();
        } catch (ArrayIndexOutOfBoundsException except) {
            System.out.println("ERROR: Required parameter missing from " + args[i - 1]);
            return printUsage();
        }
    }
    // Make sure there are exactly 2 parameters left.
    if (other_args.size() != 2) {
        System.out.println("ERROR: Wrong number of parameters: " + other_args.size() + " instead of 3.");
        return printUsage();
    }

    if (listInput == null || listInput.length() == 0) {
        System.out.println("Need kmer sequence file path!");
        return printUsage();
    }

    long now = System.currentTimeMillis();
    Path listInputPath = new Path(listInput);
    Path listOutputPath = new Path(listInputPath.getParent(), "kmer_" + Long.toHexString(now) + "_tmp");
    LOG.info(String.format("Loading %d sorted k-mers from %s to %s", cardinality, listInputPath.toString(),
            listOutputPath.toString()));
    int num = CompleteCompositionVectorUtils.flattenKmerEntropySequenceFile(conf, cardinality,
            listInputPath.toString(), listOutputPath.toString(), cleanLogs);

    initJob(conf, listOutputPath.toString(), num, other_args.get(0), other_args.get(1), cleanLogs);
    return 0;
}

From source file: org.mitre.ccv.mapred.InvertKmerProbabilities.java

License: Open Source License

@Override
public int run(String[] args) throws Exception {
    JobConf conf = new JobConf(getConf());
    boolean cleanLogs = false;

    List<String> other_args = new ArrayList<String>();
    for (int i = 0; i < args.length; ++i) {
        try {
            if ("-m".equals(args[i])) {
                conf.setNumMapTasks(Integer.parseInt(args[++i]));
            } else if ("-r".equals(args[i])) {
                conf.setNumReduceTasks(Integer.parseInt(args[++i]));
            } else if ("-c".equals(args[i])) {
                cleanLogs = true;
            } else if ("-libjars".equals(args[i])) {
                conf.set("tmpjars", FileUtils.validateFiles(args[++i], conf));

                URL[] libjars = FileUtils.getLibJars(conf);
                if (libjars != null && libjars.length > 0) {
                    // Add libjars to client/tasks classpath
                    conf.setClassLoader(new URLClassLoader(libjars, conf.getClassLoader()));
                    // Adds libjars to our classpath
                    Thread.currentThread().setContextClassLoader(
                            new URLClassLoader(libjars, Thread.currentThread().getContextClassLoader()));
                }
            } else {
                other_args.add(args[i]);
            }
        } catch (NumberFormatException except) {
            System.out.println("ERROR: Integer expected instead of " + args[i]);
            return printUsage();
        } catch (ArrayIndexOutOfBoundsException except) {
            System.out.println("ERROR: Required parameter missing from " + args[i - 1]);
            return printUsage();
        }
    }
    // Make sure there are exactly 2 parameters left.
    if (other_args.size() != 2) {
        System.out.println("ERROR: Wrong number of parameters: " + other_args.size() + " instead of 2.");
        return printUsage();
    }

    return initJob(conf, other_args.get(0), other_args.get(1), cleanLogs);

}

From source file: org.mitre.ccv.mapred.SortKmerRevisedRelativeEntropies.java

License: Open Source License

@Override
public int run(String[] args) throws Exception {
    JobConf conf = new JobConf(getConf());
    boolean cleanLogs = false;

    // @TODO: use commons getopts, org.apache.hadoop.util.GenericOptionsParser used it
    ArrayList<String> other_args = new ArrayList<String>();
    for (int i = 0; i < args.length; ++i) {
        try {
            if ("-m".equals(args[i])) {
                conf.setNumMapTasks(Integer.parseInt(args[++i]));
            } else if ("-r".equals(args[i])) {
                conf.setNumReduceTasks(Integer.parseInt(args[++i]));
            } else if ("-c".equals(args[i])) {
                cleanLogs = true;
            } else if ("-t".equals(args[i])) {
                conf.setBoolean(TEXT_OUTPUT, true);
            } else if ("-libjars".equals(args[i])) {
                conf.set("tmpjars", FileUtils.validateFiles(args[++i], conf));

                URL[] libjars = FileUtils.getLibJars(conf);
                if (libjars != null && libjars.length > 0) {
                    // Add libjars to client/tasks classpath
                    conf.setClassLoader(new URLClassLoader(libjars, conf.getClassLoader()));
                    // Adds libjars to our classpath
                    Thread.currentThread().setContextClassLoader(
                            new URLClassLoader(libjars, Thread.currentThread().getContextClassLoader()));
                }
            } else {
                other_args.add(args[i]);
            }
        } catch (NumberFormatException except) {
            System.out.println("ERROR: Integer expected instead of " + args[i]);
            return printUsage();
        } catch (ArrayIndexOutOfBoundsException except) {
            System.out.println("ERROR: Required parameter missing from " + args[i - 1]);
            return printUsage();
        }
    }
    // Make sure there are exactly 2 parameters left.
    if (other_args.size() != 2) {
        System.out.println("ERROR: Wrong number of parameters: " + other_args.size() + " instead of 3.");
        return printUsage();
    }

    return initJob(conf, other_args.get(0), other_args.get(1), cleanLogs);
}

From source file: org.mitre.ccv.weka.mapred.ClassifyInstances.java

License: Open Source License

@Override
public int run(String[] args) throws Exception {
    JobConf conf = new JobConf(getConf());

    ArrayList<String> other_args = new ArrayList<String>();
    for (int i = 0; i < args.length; ++i) {
        try {
            if ("-m".equals(args[i])) {
                conf.setNumMapTasks(Integer.parseInt(args[++i]));
            } else if ("-r".equals(args[i])) {
                conf.setNumReduceTasks(Integer.parseInt(args[++i]));
            } else if ("-D".equals(args[i])) {
                String[] props = args[++i].split("=");
                conf.set(props[0], props[1]);
            } else if ("-libjars".equals(args[i])) {
                conf.set("tmpjars", FileUtils.validateFiles(args[++i], conf));

                URL[] libjars = FileUtils.getLibJars(conf);
                if (libjars != null && libjars.length > 0) {
                    // Add libjars to client/tasks classpath
                    conf.setClassLoader(new URLClassLoader(libjars, conf.getClassLoader()));
                    // Adds libjars to our classpath
                    Thread.currentThread().setContextClassLoader(
                            new URLClassLoader(libjars, Thread.currentThread().getContextClassLoader()));
                }
            } else if ("-C".equals(args[i])) {
                conf.set(CLASSIFIER, args[++i]);
            } else {
                other_args.add(args[i]);
            }
        } catch (NumberFormatException except) {
            System.out.println("ERROR: Integer expected instead of " + args[i]);
            return printUsage();
        } catch (ArrayIndexOutOfBoundsException except) {
            System.out.println("ERROR: Required parameter missing from " + args[i - 1]);
            return printUsage();
        }
    }
    // Make sure there are exactly 3 parameters left.
    if (other_args.size() != 3) {
        System.out.println("ERROR: Wrong number of parameters: " + other_args.size() + " instead of 3.");

        return printUsage();
    }

    return initJob(conf, other_args.get(0), other_args.get(1), other_args.get(2));
}

From source file: org.springframework.data.hadoop.mapreduce.JobFactoryBean.java

License: Apache License

@SuppressWarnings("rawtypes")
public void afterPropertiesSet() throws Exception {
    final Configuration cfg = ConfigurationUtils.createFrom(configuration, properties);

    buildGenericOptions(cfg);

    if (StringUtils.hasText(user)) {
        UserGroupInformation ugi = UserGroupInformation.createProxyUser(user,
                UserGroupInformation.getLoginUser());
        ugi.doAs(new PrivilegedExceptionAction<Void>() {

            @Override
            public Void run() throws Exception {
                job = new Job(cfg);
                return null;
            }
        });
    } else {
        job = new Job(cfg);
    }

    ClassLoader loader = (beanClassLoader != null ? beanClassLoader
            : org.springframework.util.ClassUtils.getDefaultClassLoader());

    if (jar != null) {
        JobConf conf = (JobConf) job.getConfiguration();
        conf.setJar(jar.getURI().toString());
        loader = ExecutionUtils.createParentLastClassLoader(jar, beanClassLoader, cfg);
        conf.setClassLoader(loader);
    }

    // set the mapper first so the map output key/value types can be auto-detected
    // and do not have to be specified explicitly
    if (mapper != null) {
        Class<? extends Mapper> mapperClass = resolveClass(mapper, loader, Mapper.class);
        job.setMapperClass(mapperClass);
        configureMapperTypesIfPossible(job, mapperClass);
    }

    if (reducer != null) {
        Class<? extends Reducer> reducerClass = resolveClass(reducer, loader, Reducer.class);
        job.setReducerClass(reducerClass);
        configureReducerTypesIfPossible(job, reducerClass);
    }

    if (StringUtils.hasText(name)) {
        job.setJobName(name);
    }
    if (combiner != null) {
        job.setCombinerClass(resolveClass(combiner, loader, Reducer.class));
    }
    if (groupingComparator != null) {
        job.setGroupingComparatorClass(resolveClass(groupingComparator, loader, RawComparator.class));
    }
    if (inputFormat != null) {
        job.setInputFormatClass(resolveClass(inputFormat, loader, InputFormat.class));
    }
    if (mapKey != null) {
        job.setMapOutputKeyClass(resolveClass(mapKey, loader, Object.class));
    }
    if (mapValue != null) {
        job.setMapOutputValueClass(resolveClass(mapValue, loader, Object.class));
    }
    if (numReduceTasks != null) {
        job.setNumReduceTasks(numReduceTasks);
    }
    if (key != null) {
        job.setOutputKeyClass(resolveClass(key, loader, Object.class));
    }
    if (value != null) {
        job.setOutputValueClass(resolveClass(value, loader, Object.class));
    }
    if (outputFormat != null) {
        job.setOutputFormatClass(resolveClass(outputFormat, loader, OutputFormat.class));
    }
    if (partitioner != null) {
        job.setPartitionerClass(resolveClass(partitioner, loader, Partitioner.class));
    }
    if (sortComparator != null) {
        job.setSortComparatorClass(resolveClass(sortComparator, loader, RawComparator.class));
    }
    if (StringUtils.hasText(workingDir)) {
        job.setWorkingDirectory(new Path(workingDir));
    }
    if (jarClass != null) {
        job.setJarByClass(jarClass);
    }

    if (!CollectionUtils.isEmpty(inputPaths)) {
        for (String path : inputPaths) {
            FileInputFormat.addInputPath(job, new Path(path));
        }
    }

    if (StringUtils.hasText(outputPath)) {
        FileOutputFormat.setOutputPath(job, new Path(outputPath));
    }

    if (compressOutput != null) {
        FileOutputFormat.setCompressOutput(job, compressOutput);
    }

    if (codecClass != null) {
        FileOutputFormat.setOutputCompressorClass(job,
                resolveClass(codecClass, loader, CompressionCodec.class));
    }

    processJob(job);
}

From source file: voldemort.store.readonly.mr.azkaban.AbstractHadoopJob.java

License: Apache License

public static void setClassLoaderAndJar(JobConf conf, Class jobClass) {
    conf.setClassLoader(Thread.currentThread().getContextClassLoader());
    String jar = HadoopUtils.findContainingJar(jobClass, Thread.currentThread().getContextClassLoader());
    if (jar != null) {
        conf.setJar(jar);
    }
}
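
As a usage note, a driver might call this helper as in the short sketch below. It assumes the Voldemort AbstractHadoopJob class shown above is on the classpath; the demo class name is made up for illustration.

import org.apache.hadoop.mapred.JobConf;

import voldemort.store.readonly.mr.azkaban.AbstractHadoopJob;

public class SetClassLoaderAndJarDemo {
    public static void main(String[] args) {
        JobConf conf = new JobConf();
        // Use this class as the anchor for locating the containing jar.
        AbstractHadoopJob.setClassLoaderAndJar(conf, SetClassLoaderAndJarDemo.class);
        System.out.println("Job jar: " + conf.getJar());
    }
}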