List of usage examples for org.apache.zookeeper Shell WINDOWS
Field: boolean WINDOWS, true when the current JVM is running on Windows.
The usage examples below are taken from Apache Hadoop test sources. Each test checks Shell.WINDOWS through JUnit's Assume so that tests relying on POSIX-only behavior are skipped, rather than failed, on Windows. The source file for each example is named above its snippet.
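Every example on this page follows the same pattern: the test calls JUnit's Assume.assumeFalse(Shell.WINDOWS) as its first statement, so the test is skipped rather than failed when it runs on Windows. The following is a minimal sketch of that pattern in isolation; the class and test names are illustrative, and Shell is assumed to resolve to the Hadoop utility class that the tests below import.

import org.apache.hadoop.util.Shell; // assumed import; the Hadoop tests below use this Shell
import org.junit.Assume;
import org.junit.Test;

public class SkipOnWindowsExample {

    @Test
    public void testPosixOnlyBehavior() throws Exception {
        // Skip (rather than fail) this test when the JVM is running on Windows.
        Assume.assumeFalse(Shell.WINDOWS);

        // From here on, the test may rely on POSIX-only behavior
        // (shell scripts, local job submission, path handling, etc.).
    }
}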
From source file:org.apache.hadoop.mapred.TestJavaSerialization.java
License:Apache License
@Test
public void testMapReduceJob() throws Exception {
    Assume.assumeFalse(Shell.WINDOWS);

    JobConf conf = new JobConf(TestJavaSerialization.class);
    conf.setJobName("JavaSerialization");

    FileSystem fs = FileSystem.get(conf);
    cleanAndCreateInput(fs);

    conf.set("io.serializations",
        "org.apache.hadoop.io.serializer.JavaSerialization,"
            + "org.apache.hadoop.io.serializer.WritableSerialization");

    conf.setInputFormat(TextInputFormat.class);

    conf.setOutputKeyClass(String.class);
    conf.setOutputValueClass(Long.class);
    conf.setOutputKeyComparatorClass(JavaSerializationComparator.class);

    conf.setMapperClass(WordCountMapper.class);
    conf.setReducerClass(SumReducer.class);

    conf.set(MRConfig.FRAMEWORK_NAME, MRConfig.LOCAL_FRAMEWORK_NAME);

    FileInputFormat.setInputPaths(conf, INPUT_DIR);
    FileOutputFormat.setOutputPath(conf, OUTPUT_DIR);

    String inputFileContents =
        FileUtils.readFileToString(new File(INPUT_FILE.toUri().getPath()));
    assertTrue("Input file contents not as expected; contents are '"
        + inputFileContents + "', expected \"b a\n\" ",
        inputFileContents.equals("b a\n"));

    JobClient.runJob(conf);

    Path[] outputFiles = FileUtil.stat2Paths(
        fs.listStatus(OUTPUT_DIR, new Utils.OutputFileUtils.OutputFilesFilter()));
    assertEquals(1, outputFiles.length);

    InputStream is = fs.open(outputFiles[0]);
    String reduceOutput = org.apache.commons.io.IOUtils.toString(is);
    String[] lines = reduceOutput.split(System.getProperty("line.separator"));
    assertEquals("Unexpected output; received output '" + reduceOutput + "'",
        "a\t1", lines[0]);
    assertEquals("Unexpected output; received output '" + reduceOutput + "'",
        "b\t1", lines[1]);
    assertEquals("Reduce output has extra lines; output is '" + reduceOutput + "'",
        2, lines.length);
    is.close();
}
From source file:org.apache.hadoop.mapred.TestJavaSerialization.java
License:Apache License
/**
 * HADOOP-4466:
 * This test verifies that the JavaSerialization impl can write to
 * SequenceFiles. This works because SequenceFileOutputFormat is not
 * coupled to Writable types; if it were, the job would fail.
 */
@Test
public void testWriteToSequencefile() throws Exception {
    Assume.assumeFalse(Shell.WINDOWS);

    JobConf conf = new JobConf(TestJavaSerialization.class);
    conf.setJobName("JavaSerialization");

    FileSystem fs = FileSystem.get(conf);
    cleanAndCreateInput(fs);

    conf.set("io.serializations",
        "org.apache.hadoop.io.serializer.JavaSerialization,"
            + "org.apache.hadoop.io.serializer.WritableSerialization");

    conf.setInputFormat(TextInputFormat.class);
    // test that we can write to sequence files
    conf.setOutputFormat(SequenceFileOutputFormat.class);

    conf.setOutputKeyClass(String.class);
    conf.setOutputValueClass(Long.class);
    conf.setOutputKeyComparatorClass(JavaSerializationComparator.class);

    conf.setMapperClass(WordCountMapper.class);
    conf.setReducerClass(SumReducer.class);

    conf.set(MRConfig.FRAMEWORK_NAME, MRConfig.LOCAL_FRAMEWORK_NAME);

    FileInputFormat.setInputPaths(conf, INPUT_DIR);
    FileOutputFormat.setOutputPath(conf, OUTPUT_DIR);

    JobClient.runJob(conf);

    Path[] outputFiles = FileUtil.stat2Paths(
        fs.listStatus(OUTPUT_DIR, new Utils.OutputFileUtils.OutputFilesFilter()));
    assertEquals(1, outputFiles.length);
}
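Both tests above depend on a WordCountMapper and a SumReducer that are defined elsewhere in TestJavaSerialization and are not shown on this page. They emit plain String keys and Long values so that JavaSerialization, rather than Writable serialization, is exercised. The following is a hedged sketch of what such classes could look like with the old org.apache.hadoop.mapred API; the exact bodies are assumptions, not the original source.

import java.io.IOException;
import java.util.Iterator;
import java.util.StringTokenizer;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;

// Hypothetical mapper: tokenizes each input line and emits (word, 1) using
// non-Writable key/value types, which forces JavaSerialization to be used.
class WordCountMapper extends MapReduceBase
        implements Mapper<LongWritable, Text, String, Long> {
    @Override
    public void map(LongWritable key, Text value,
            OutputCollector<String, Long> output, Reporter reporter) throws IOException {
        StringTokenizer st = new StringTokenizer(value.toString());
        while (st.hasMoreTokens()) {
            output.collect(st.nextToken(), 1L);
        }
    }
}

// Hypothetical reducer: sums the Long counts collected for each key.
class SumReducer<K> extends MapReduceBase
        implements Reducer<K, Long, K, Long> {
    @Override
    public void reduce(K key, Iterator<Long> values,
            OutputCollector<K, Long> output, Reporter reporter) throws IOException {
        long sum = 0;
        while (values.hasNext()) {
            sum += values.next();
        }
        output.collect(key, sum);
    }
}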
From source file:org.apache.hadoop.mapred.TestLocalJobSubmission.java
License:Apache License
/**
 * Test the local job submission options of
 * -jt local -libjars.
 * @throws IOException
 */
@Test
public void testLocalJobLibjarsOption() throws IOException {
    Assume.assumeFalse(Shell.WINDOWS);

    Path jarPath = makeJar(new Path(TEST_ROOT_DIR, "test.jar"));

    Configuration conf = new Configuration();
    conf.set(FileSystem.FS_DEFAULT_NAME_KEY, "hdfs://localhost:9000");
    conf.set(MRConfig.FRAMEWORK_NAME, "local");
    final String[] args = {
        "-jt", "local", "-libjars", jarPath.toString(),
        "-m", "1", "-r", "1", "-mt", "1", "-rt", "1"
    };
    int res = -1;
    try {
        res = ToolRunner.run(conf, new SleepJob(), args);
    } catch (Exception e) {
        System.out.println("Job failed with " + e.getLocalizedMessage());
        e.printStackTrace(System.out);
        fail("Job failed");
    }
    assertEquals("dist job res is not 0:", 0, res);
}
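The test above also relies on a makeJar helper that writes a small jar under TEST_ROOT_DIR before passing its path to -libjars. That helper is not reproduced on this page; the sketch below shows one plausible way to write it (the class name and jar entry are illustrative assumptions).

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.jar.JarOutputStream;
import java.util.zip.ZipEntry;

import org.apache.hadoop.fs.Path;

class JarHelper {
    // Hypothetical helper: writes a tiny jar containing a single entry so the
    // resulting path can be handed to the -libjars option of a local job.
    static Path makeJar(Path p) throws IOException {
        try (FileOutputStream fos = new FileOutputStream(new File(p.toString()));
                JarOutputStream jos = new JarOutputStream(fos)) {
            jos.putNextEntry(new ZipEntry("test.jar.inside"));
            jos.write("inside the jar!".getBytes());
            jos.closeEntry();
        }
        return p;
    }
}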
From source file:org.apache.hadoop.mapred.TestMRTimelineEventHandling.java
License:Apache License
@Test
@Ignore
public void testMapreduceJobTimelineServiceEnabled() throws Exception {
    Assume.assumeFalse(Shell.WINDOWS);

    Configuration conf = new YarnConfiguration();
    conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, false);
    MiniMRYarnCluster cluster = null;
    try {
        cluster = new MiniMRYarnCluster(TestJobHistoryEventHandler.class.getSimpleName(), 1);
        cluster.init(conf);
        cluster.start();
        conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
            MiniYARNCluster.getHostname() + ":"
                + cluster.getApplicationHistoryServer().getPort());
        TimelineStore ts = cluster.getApplicationHistoryServer().getTimelineStore();

        Path inDir = new Path("input");
        Path outDir = new Path("output");
        RunningJob job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
        Assert.assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue());
        TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null, null,
            null, null, null, null, null, null);
        Assert.assertEquals(0, entities.getEntities().size());

        conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
        job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
        Assert.assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue());
        entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null,
            null, null, null);
        Assert.assertEquals(1, entities.getEntities().size());
        TimelineEntity tEntity = entities.getEntities().get(0);
        Assert.assertEquals(job.getID().toString(), tEntity.getEntityId());
    } finally {
        if (cluster != null) {
            cluster.stop();
        }
    }

    conf = new YarnConfiguration();
    conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
    cluster = null;
    try {
        cluster = new MiniMRYarnCluster(TestJobHistoryEventHandler.class.getSimpleName(), 1);
        cluster.init(conf);
        cluster.start();
        conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
            MiniYARNCluster.getHostname() + ":"
                + cluster.getApplicationHistoryServer().getPort());
        TimelineStore ts = cluster.getApplicationHistoryServer().getTimelineStore();

        Path inDir = new Path("input");
        Path outDir = new Path("output");
        conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, false);
        RunningJob job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
        Assert.assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue());
        TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null, null,
            null, null, null, null, null, null);
        Assert.assertEquals(0, entities.getEntities().size());

        conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
        job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
        Assert.assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue());
        entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null,
            null, null, null);
        Assert.assertEquals(1, entities.getEntities().size());
        TimelineEntity tEntity = entities.getEntities().get(0);
        Assert.assertEquals(job.getID().toString(), tEntity.getEntityId());
    } finally {
        if (cluster != null) {
            cluster.stop();
        }
    }
}
From source file:org.apache.hadoop.mapreduce.v2.hs.TestJobListCache.java
License:Apache License
@Test(timeout = 5000)
public void testAddExisting() {
    Assume.assumeFalse(Shell.WINDOWS);

    JobListCache cache = new JobListCache(2, 1000);

    JobId jobId = MRBuilderUtils.newJobId(1, 1, 1);
    HistoryFileInfo fileInfo = Mockito.mock(HistoryFileInfo.class);
    Mockito.when(fileInfo.getJobId()).thenReturn(jobId);

    cache.addIfAbsent(fileInfo);
    cache.addIfAbsent(fileInfo);
    assertEquals("Incorrect number of cache entries", 1, cache.values().size());
}
From source file:org.apache.hadoop.mapreduce.v2.TestMRJobsWithProfiler.java
License:Apache License
@Test(timeout = 150000)
public void testDifferentProfilers() throws Exception {
    Assume.assumeFalse(Shell.WINDOWS);
    LOG.info("Starting testDefaultProfiler");
    testProfilerInternal(false);
}
From source file:org.apache.hadoop.streaming.TestStreamingExitStatus.java
License:Apache License
@Test
public void testMapFailOk() throws Exception {
    Assume.assumeFalse(Shell.WINDOWS);
    runStreamJob(false, true);
}
From source file:org.apache.hadoop.streaming.TestStreamingExitStatus.java
License:Apache License
@Test
public void testMapFailNotOk() throws Exception {
    Assume.assumeFalse(Shell.WINDOWS);
    runStreamJob(true, true);
}
From source file:org.apache.hadoop.streaming.TestStreamingExitStatus.java
License:Apache License
@Test
public void testReduceFailOk() throws Exception {
    Assume.assumeFalse(Shell.WINDOWS);
    runStreamJob(false, false);
}
From source file:org.apache.hadoop.streaming.TestStreamingExitStatus.java
License:Apache License
@Test
public void testReduceFailNotOk() throws Exception {
    Assume.assumeFalse(Shell.WINDOWS);
    runStreamJob(true, false);
}
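All four streaming tests above delegate to a runStreamJob(exitStatusIsFailure, failMap) helper defined in TestStreamingExitStatus, which submits a streaming job whose map or reduce task exits with a non-zero status and then checks the job's return code. The helper itself is not shown on this page; the following is a hedged sketch of the shape such a helper could take. The genArgs call and the exact assertion messages are assumptions, not the original source.

// Assumed context: a method inside the test class, with
//   import org.apache.hadoop.streaming.StreamJob;
//   import static org.junit.Assert.assertEquals;
protected void runStreamJob(boolean exitStatusIsFailure, boolean failMap) throws Exception {
    // genArgs(...) is assumed to build the usual -input/-output/-mapper/-reducer
    // arguments plus "-jobconf stream.non.zero.exit.is.failure=" + exitStatusIsFailure.
    String[] args = genArgs(exitStatusIsFailure, failMap);

    StreamJob job = new StreamJob(args, false /* mayExit */);
    int returnStatus = job.go();

    if (exitStatusIsFailure) {
        assertEquals("Streaming job was expected to fail", 1, returnStatus);
    } else {
        assertEquals("Streaming job was expected to succeed", 0, returnStatus);
    }
}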