Example usage for org.apache.hadoop.mapred JobConf JobConf

Introduction

On this page you can find example usages of the no-argument constructor of org.apache.hadoop.mapred.JobConf.

Prototype

public JobConf() 

Document

Construct a map/reduce job configuration.
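
The no-argument constructor creates a JobConf loaded from the default Hadoop resources, ready to be populated programmatically. Below is a minimal sketch of the typical pattern; the job name and property values are illustrative, not taken from any of the sources on this page.

JobConf conf = new JobConf();
conf.setJobName("example-job");                    // illustrative job name
conf.set("mapreduce.job.reduces", "0");            // plain key/value setting
int reduces = conf.getInt("mapreduce.job.reduces", 1);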

Usage

From source file:com.cloudera.lib.service.hadoop.HadoopService.java

License:Open Source License

protected JobConf createHadoopConf(Configuration conf) {
    JobConf hadoopConf = new JobConf();
    XConfiguration.copy(serviceHadoopConf, hadoopConf);
    XConfiguration.copy(conf, hadoopConf);
    return hadoopConf;
}
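
XConfiguration.copy is a Cloudera helper; a rough stock-Hadoop equivalent iterates the source Configuration, which implements Iterable<Map.Entry<String, String>>. The helper name copyInto below is our own, not part of any API:

private static void copyInto(Configuration src, JobConf dst) {
    // Copy every key/value pair from src into dst.
    for (Map.Entry<String, String> entry : src) {
        dst.set(entry.getKey(), entry.getValue());
    }
}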

From source file:com.cloudera.recordservice.hcatalog.common.TestHCatRSUtil.java

License:Apache License

@Test
public void copyCredentialsToJobConfTest() {
    JobConf conf = new JobConf();
    Credentials cred = new Credentials();
    cred.addToken(new Text("Alias"), new Token<TokenIdentifier>());
    HCatRSUtil.copyCredentialsToJobConf(cred, conf);
    assertEquals(1, conf.getCredentials().numberOfTokens());
}
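
HCatRSUtil.copyCredentialsToJobConf is RecordService-specific. Assuming it performs a standard credential merge, the same effect is available from stock Hadoop via Credentials.addAll:

JobConf conf = new JobConf();
Credentials cred = new Credentials();
cred.addToken(new Text("Alias"), new Token<TokenIdentifier>());
// Merge all tokens and secret keys into the job's credential store.
conf.getCredentials().addAll(cred);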

From source file:com.cloudera.recordservice.tests.ClusterController.java

License:Apache License

/**
 * This method returns a JobConf object that allows a map reduce job to be run
 * on the cluster.
 */
public JobConf getJobConf() throws MalformedURLException {
    JobConf conf = new JobConf();
    populateJobConf(conf);
    return conf;
}

From source file:com.cloudera.sqoop.orm.TestParseMethods.java

License:Apache License

public void runParseTest(String fieldTerminator, String lineTerminator, String encloser, String escape,
        boolean encloseRequired) throws IOException {

    ClassLoader prevClassLoader = null;

    String[] argv = getArgv(true, fieldTerminator, lineTerminator, encloser, escape, encloseRequired);
    runImport(argv);
    try {
        String tableClassName = getTableName();

        argv = getArgv(false, fieldTerminator, lineTerminator, encloser, escape, encloseRequired);
        SqoopOptions opts = new ImportTool().parseArguments(argv, null, null, true);

        CompilationManager compileMgr = new CompilationManager(opts);
        String jarFileName = compileMgr.getJarFilename();

        // Make sure the user's class is loaded into our address space.
        prevClassLoader = ClassLoaderStack.addJarFile(jarFileName, tableClassName);

        JobConf job = new JobConf();
        job.setJar(jarFileName);

        // Tell the job what class we're testing.
        job.set(ReparseMapper.USER_TYPE_NAME_KEY, tableClassName);

        // use local mode in the same JVM.
        ConfigurationHelper.setJobtrackerAddr(job, "local");
        if (!BaseSqoopTestCase.isOnPhysicalCluster()) {
            job.set(CommonArgs.FS_DEFAULT_NAME, CommonArgs.LOCAL_FS);
        }
        String warehouseDir = getWarehouseDir();
        Path warehousePath = new Path(warehouseDir);
        Path inputPath = new Path(warehousePath, getTableName());
        Path outputPath = new Path(warehousePath, getTableName() + "-out");

        job.setMapperClass(ReparseMapper.class);
        job.setNumReduceTasks(0);
        FileInputFormat.addInputPath(job, inputPath);
        FileOutputFormat.setOutputPath(job, outputPath);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);

        JobClient.runJob(job);
    } catch (InvalidOptionsException ioe) {
        fail(ioe.toString());
    } catch (ParseException pe) {
        fail(pe.toString());
    } finally {
        if (null != prevClassLoader) {
            ClassLoaderStack.setCurrentClassLoader(prevClassLoader);
        }
    }
}
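
Stripped of the Sqoop-specific scaffolding, the JobConf wiring above is the classic org.apache.hadoop.mapred submission pattern. A bare-bones sketch, with the paths as placeholders:

JobConf job = new JobConf();
job.setMapperClass(ReparseMapper.class);      // any Mapper implementation
job.setNumReduceTasks(0);                     // map-only job
FileInputFormat.addInputPath(job, new Path("/in"));     // placeholder path
FileOutputFormat.setOutputPath(job, new Path("/out"));  // placeholder path
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(NullWritable.class);
JobClient.runJob(job);                        // blocks until the job finishes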

From source file:com.cloudera.sqoop.orm.TestParseMethods.java

License:Apache License

public void testFieldSetter() throws IOException {
    ClassLoader prevClassLoader = null;

    String[] types = { "VARCHAR(32)", "VARCHAR(32)" };
    String[] vals = { "'meep'", "'foo'" };
    createTableWithColTypes(types, vals);

    String[] argv = getArgv(true, ",", "\\n", "\\\'", "\\", false);
    runImport(argv);
    try {
        String tableClassName = getTableName();

        argv = getArgv(false, ",", "\\n", "\\\'", "\\", false);
        SqoopOptions opts = new ImportTool().parseArguments(argv, null, null, true);

        CompilationManager compileMgr = new CompilationManager(opts);
        String jarFileName = compileMgr.getJarFilename();

        // Make sure the user's class is loaded into our address space.
        prevClassLoader = ClassLoaderStack.addJarFile(jarFileName, tableClassName);

        JobConf job = new JobConf();
        job.setJar(jarFileName);

        // Tell the job what class we're testing.
        job.set(ExplicitSetMapper.USER_TYPE_NAME_KEY, tableClassName);
        job.set(ExplicitSetMapper.SET_COL_KEY, BASE_COL_NAME + "0");
        job.set(ExplicitSetMapper.SET_VAL_KEY, "this-is-a-test");

        // use local mode in the same JVM.
        ConfigurationHelper.setJobtrackerAddr(job, "local");
        if (!BaseSqoopTestCase.isOnPhysicalCluster()) {
            job.set(CommonArgs.FS_DEFAULT_NAME, CommonArgs.LOCAL_FS);
        }
        String warehouseDir = getWarehouseDir();
        Path warehousePath = new Path(warehouseDir);
        Path inputPath = new Path(warehousePath, getTableName());
        Path outputPath = new Path(warehousePath, getTableName() + "-out");

        job.setMapperClass(ExplicitSetMapper.class);
        job.setNumReduceTasks(0);
        FileInputFormat.addInputPath(job, inputPath);
        FileOutputFormat.setOutputPath(job, outputPath);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);

        JobClient.runJob(job);
    } catch (InvalidOptionsException ioe) {
        fail(ioe.toString());
    } catch (ParseException pe) {
        fail(pe.toString());
    } finally {
        if (null != prevClassLoader) {
            ClassLoaderStack.setCurrentClassLoader(prevClassLoader);
        }
    }
}

From source file:com.dataartisans.flink.cascading.planner.FlinkFlowStep.java

License:Apache License

/**
 * Configures the Flink program for this step.
 */
public Configuration createInitializedConfig(FlowProcess<Configuration> flowProcess,
        Configuration parentConfig) {

    this.env.getConfig().registerKryoType(Tuple.class);

    Configuration config = parentConfig == null ? new JobConf() : HadoopUtil.copyJobConf(parentConfig);
    config.set("cascading.flow.step.num", Integer.toString(getOrdinal()));
    HadoopUtil.setIsInflow(config);

    this.setConfig(config);

    return config;
}
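
Passing a new JobConf where a Configuration is expected works because JobConf extends org.apache.hadoop.conf.Configuration, so the upcast is implicit:

Configuration config = new JobConf();   // JobConf is-a Configuration
config.set("cascading.flow.step.num", "1");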

From source file:com.digitalpebble.behemoth.solr.TestSOLRWriter.java

License:Apache License

@Test
public void testFieldMappings() throws IOException {
    JobConf conf = new JobConf();
    conf.set("solr.server.url", "http://example.org");
    conf.set("solr.f.person", "Person.string");
    conf.set("solr.f.personTitle", "Person.title");
    conf.set("solr.f.location", "Location");

    Progressable progress = new Progressable() {
        @Override
        public void progress() {

        }
    };
    SOLRWriter writer = new SOLRWriter(progress);
    writer.open(conf, "test");

    assertEquals(writer.getFieldMapping().size(), 2);
    assertNotNull(writer.getFieldMapping().get("Person"));
    assertEquals(writer.getFieldMapping().get("Person").size(), 2);
    assertEquals(writer.getFieldMapping().get("Person").get("string"), "person");
    assertEquals(writer.getFieldMapping().get("Person").get("title"), "personTitle");
    assertNotNull(writer.getFieldMapping().get("Location"));
    assertEquals(writer.getFieldMapping().get("Location").size(), 1);
    assertEquals(writer.getFieldMapping().get("Location").get("*"), "location");
}

From source file:com.expedia.edw.hive.udf.FileStrategy.java

License:Open Source License

public void load(String schemaAndTableName, String keyName, String valueName) {
    job = new JobConf();

    File cacheData = clientCache.fetchData(schemaAndTableName, keyName, valueName);
    cacheFileName = cacheData.getName();

    DistributedCache.addCacheFile(cacheData.toURI(), this.job);
}
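
Files registered with DistributedCache.addCacheFile are localized on each task node. On the task side they are typically retrieved as sketched below, using the same deprecated mapred-era API (the surrounding configure method is illustrative):

public void configure(JobConf job) {
    try {
        // Paths to the local copies of all cached files, or null if none.
        Path[] localFiles = DistributedCache.getLocalCacheFiles(job);
        if (localFiles != null) {
            for (Path p : localFiles) {
                // match on file name, then open and read the local copy
            }
        }
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}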

From source file:com.facebook.hive.orc.TestInputOutputFormat.java

License:Apache License

@Before
public void openFileSystem() throws Exception {
    conf = new JobConf();
    fs = FileSystem.getLocal(conf);
    testFilePath = new Path(workDir, "TestInputOutputFormat." + testCaseName.getMethodName() + ".orc");
    fs.delete(testFilePath, false);
}

From source file:com.facebook.presto.hive.AbstractTestHiveFileFormats.java

License:Apache License

public FileSplit createTestFile(String filePath, HiveOutputFormat<?, ?> outputFormat,
        @SuppressWarnings("deprecation") SerDe serDe, String compressionCodec) throws Exception {
    JobConf jobConf = new JobConf();
    Properties tableProperties = new Properties();
    tableProperties.setProperty("columns", COLUMN_NAMES_STRING);
    tableProperties.setProperty("columns.types", COLUMN_TYPES);
    serDe.initialize(new Configuration(), tableProperties);

    if (compressionCodec != null) {
        CompressionCodec codec = new CompressionCodecFactory(new Configuration())
                .getCodecByName(compressionCodec);
        jobConf.set(COMPRESS_CODEC, codec.getClass().getName());
        jobConf.set(COMPRESS_TYPE, SequenceFile.CompressionType.BLOCK.toString());
    }

    RecordWriter recordWriter = outputFormat.getHiveRecordWriter(jobConf, new Path(filePath), Text.class,
            compressionCodec != null, tableProperties, new Progressable() {
                @Override
                public void progress() {
                }
            });

    try {
        serDe.initialize(new Configuration(), tableProperties);

        SettableStructObjectInspector objectInspector = getStandardStructObjectInspector(COLUMN_NAMES,
                FIELD_INSPECTORS);
        Object row = objectInspector.create();

        List<StructField> fields = ImmutableList.copyOf(objectInspector.getAllStructFieldRefs());

        for (int rowNumber = 0; rowNumber < NUM_ROWS; rowNumber++) {
            for (int i = 0; i < TEST_VALUES.size(); i++) {
                Object key = TEST_VALUES.get(i).getKey();
                if (key instanceof Slice) {
                    key = ((Slice) key).getBytes();
                }
                objectInspector.setStructFieldData(row, fields.get(i), key);
            }

            Writable record = serDe.serialize(row, objectInspector);
            recordWriter.write(record);
        }
    } finally {
        recordWriter.close(false);
    }

    Path path = new Path(filePath);
    path.getFileSystem(new Configuration()).setVerifyChecksum(true);
    File file = new File(filePath);
    return new FileSplit(path, 0, file.length(), new String[0]);
}