Example usage for org.apache.hadoop.fs Path Path

Introduction

This page lists example usages of the org.apache.hadoop.fs Path(URI) constructor, collected from real source files.

Prototype

public Path(URI aUri) 

Document

Constructs a path from a URI.
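
The prototype above takes a java.net.URI, while most of the examples below call the companion Path(String) constructor, which parses the string into a URI internally. Below is a minimal sketch of both forms, assuming hadoop-common is on the classpath; the hdfs://namenode:9000 authority and the /user/data/input path are placeholder values, not taken from the examples.

import java.net.URI;

import org.apache.hadoop.fs.Path;

public class PathConstructionExample {
    public static void main(String[] args) {
        // Path(URI): the constructor documented above.
        Path fromUri = new Path(URI.create("hdfs://namenode:9000/user/data/input"));

        // Path(String): the overload most usage examples below rely on;
        // the string is parsed into scheme, authority, and path components.
        Path fromString = new Path("/user/data/input");

        System.out.println(fromUri);    // hdfs://namenode:9000/user/data/input
        System.out.println(fromString); // /user/data/input
    }
}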

Usage

From source file:ar.edu.ungs.garules.CensusJob.java

License:Apache License

/**
 * Main -> executes the process
 * @param args
 * @throws Exception
 */
public static void main(String[] args) throws Exception {

    long time = System.currentTimeMillis();
    Individual<BitSet> bestInd = null;
    if (args.length != 2)
        args = DEFAULT_ARGS;

    // GA setup
    // --------------------------------------------------------------------------------------------------------------
    Set<Individual<BitSet>> bestIndividuals = new HashSet<Individual<BitSet>>();
    List<Gene> genes = new ArrayList<Gene>();
    genes.add(genCondicionACampo);
    genes.add(genCondicionAOperador);
    genes.add(genCondicionAValor);
    genes.add(genCondicionBPresente);
    genes.add(genCondicionBCampo);
    genes.add(genCondicionBOperador);
    genes.add(genCondicionBValor);
    genes.add(genCondicionCPresente);
    genes.add(genCondicionCCampo);
    genes.add(genCondicionCOperador);
    genes.add(genCondicionCValor);
    genes.add(genPrediccionCampo);
    genes.add(genPrediccionValor);

    Map<Gene, Ribosome<BitSet>> translators = new HashMap<Gene, Ribosome<BitSet>>();
    for (Gene gene : genes)
        translators.put(gene, new BitSetToIntegerRibosome(0));

    Genome<BitSet> genome = new BitSetGenome("Chromosome 1", genes, translators);

    Parameter<BitSet> par = new Parameter<BitSet>(0.035, 0.9, 200, new DescendantAcceptEvaluator<BitSet>(),
            new CensusFitnessEvaluator(), new BitSetOnePointCrossover(), new BitSetFlipMutator(), null,
            new BitSetRandomPopulationInitializer(), null, new ProbabilisticRouletteSelector(),
            new GlobalSinglePopulation<BitSet>(genome), 500, 100d, new BitSetMorphogenesisAgent(), genome);

    ParallelFitnessEvaluationGA<BitSet> ga = new ParallelFitnessEvaluationGA<BitSet>(par);
    ga.init();
    // --------------------------------------------------------------------------------------------------------------
    // End of GA setup

    // Iterate up to the maximum number of generations allowed
    for (int i = 0; i < par.getMaxGenerations(); i++) {
        ga.initGeneration();
        Configuration conf = new Configuration();

        // Debug
        //showPopulation(ga.getPopulation());
        //System.out.println((System.currentTimeMillis() - time) / 1000 + "s elapsed since start");

        // Pass the conditions to evaluate as job parameters
        Iterator<Individual<BitSet>> ite = ga.getPopulation().iterator();
        int contador = 0;
        Set<String> expUnicas = new HashSet<String>();
        while (ite.hasNext()) {
            Individual<BitSet> ind = ite.next();
            String rep = RuleStringAdaptor.adapt(RuleAdaptor.adapt(ind));
            expUnicas.add(rep);
        }
        for (String rep : expUnicas)
            if (ocurrencias.get(rep) == null) {
                conf.set(String.valueOf(contador), rep);
                contador++;
            }

        // Configuration of job i
        Job job = new Job(conf, "GA rules - Generation " + i);
        job.setJarByClass(CensusJob.class);
        job.setMapperClass(CensusMapper.class);
        job.setCombinerClass(CensusReducer.class);
        job.setReducerClass(CensusReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        job.setOutputFormatClass(SequenceFileOutputFormat.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        SequenceFileOutputFormat.setOutputPath(job, new Path(args[1] + "g" + i));

        // Run the map-reduce job representing generation i
        job.waitForCompletion(true);

        // Compute fitness from the job output; if a better individual appears, add it to the set of best individuals
        llenarOcurrencias(conf, args[1] + "g" + i);

        // Run the GA for this generation.
        Individual<BitSet> winnerGen = ga.run(new CensusFitnessEvaluator(ocurrencias));

        // Keep track of the best individuals
        if (bestInd == null) {
            bestInd = winnerGen;
            bestIndividuals.add(winnerGen);
        } else if (winnerGen.getFitness() > bestInd.getFitness()) {
            bestInd = winnerGen;
            bestIndividuals.add(winnerGen);
        }

        // Debug
        System.out.println("Mejor Individuo Generacion " + i + " => " + RuleAdaptor.adapt(bestInd)
                + " => Fitness = " + bestInd.getFitness());

    }

    // Sort and display the best individuals
    List<Individual<BitSet>> bestIndList = new ArrayList<Individual<BitSet>>(bestIndividuals);
    Collections.sort(bestIndList, new Comparator<Individual<BitSet>>() {
        public int compare(Individual<BitSet> o1, Individual<BitSet> o2) {
            return (o1.getFitness() > o2.getFitness() ? -1 : (o1.getFitness() == o2.getFitness() ? 0 : 1));
        }
    });
    showPopulation(bestIndList);
    System.out.println("Tiempo total de corrida " + (System.currentTimeMillis() - time) / 1000 + "s");

}

From source file:ar.edu.ungs.garules.CensusJob.java

License:Apache License

/**
 * Reads the reducer output from the distributed file system and loads it into the in-memory "ocurrencias" map
 * @param conf
 * @param path
 * @throws IOException
 */
@SuppressWarnings("deprecation")
private static void llenarOcurrencias(Configuration conf, String path) throws IOException {
    FileSystem fs = new DistributedFileSystem(
            new InetSocketAddress(DEFAULT_FILE_SYSTEM_HOST, DEFAULT_FILE_SYSTEM_PORT), conf);
    SequenceFile.Reader reader = new SequenceFile.Reader(fs, new Path(path + "/part-r-00000"), conf);

    Text key = new Text();
    IntWritable value = new IntWritable();
    while (reader.next(key, value))
        ocurrencias.put(key.toString(), value.get());
    reader.close();
}

From source file:ark.util.HadoopUtil.java

License:Apache License

public static BufferedReader getFileReader(String path) {
    try {
        Path filePath = new Path(path);
        FileSystem fileSystem = FileSystem.get(new Configuration());
        BufferedReader reader = new BufferedReader(new InputStreamReader(fileSystem.open(filePath)));
        return reader;
    } catch (Exception e) {
        return null;
    }
}

From source file:arpserver.HadoopTool.java

@Override
public int run(String[] strings) throws Exception {
    Configuration conf = new Configuration();
    String in = strings[0];
    String out = strings[1];
    FileSystem fs = FileSystem.get(conf);
    if (fs.exists(new Path(out))) {
        fs.delete(new Path(out), true);
        fs.delete(new Path(out + "Src"), true);
        fs.delete(new Path(out + "Mitm"), true);
        fs.delete(new Path(out + "ArpScn"), true);
        fs.delete(new Path(out + "s"), true);
        fs.delete(new Path(out + "d"), true);
        fs.delete(new Path(out + "t"), true);
    }
    Job job = new Job();
    Job job2 = new Job();
    Job job3 = new Job();
    Job job4 = new Job();
    Job job5 = new Job();
    Job job6 = new Job();
    Job job7 = new Job();
    job.setJobName("Q");
    job2.setJobName("Src");
    job3.setJobName("Mitm");
    job4.setJobName("ArpScn");
    job5.setJobName("s");
    job6.setJobName("d");
    job7.setJobName("time");
    job.setJarByClass(QuickDetect.class);

    job.setMapperClass(Qmapper.class);
    job.setReducerClass(Qreducer.class);

    job2.setMapperClass(Srcmapper.class);
    job2.setReducerClass(Srcreducer.class);

    job3.setMapperClass(ArpScanmapper.class);
    job3.setReducerClass(ArpScanreducer.class);

    job4.setMapperClass(Mitmmapper.class);
    job4.setReducerClass(Mitmreducer.class);

    job5.setMapperClass(Smapper.class);
    job5.setReducerClass(Sreducer.class);

    job6.setMapperClass(Dmapper.class);
    job6.setReducerClass(Dreducer.class);

    job7.setMapperClass(timemapper.class);
    job7.setReducerClass(timereducer.class);
    //testFinal168.txt
    job.setOutputKeyClass(NullWritable.class);
    job.setOutputValueClass(Text.class);

    job2.setOutputKeyClass(NullWritable.class);
    job2.setOutputValueClass(Text.class);

    job3.setOutputKeyClass(NullWritable.class);
    job3.setOutputValueClass(IntWritable.class);

    job4.setOutputKeyClass(NullWritable.class);
    job4.setOutputValueClass(Text.class);

    job5.setOutputKeyClass(NullWritable.class);
    job5.setOutputValueClass(Text.class);

    job6.setOutputKeyClass(NullWritable.class);
    job6.setOutputValueClass(Text.class);

    job7.setOutputKeyClass(NullWritable.class);
    job7.setOutputValueClass(Text.class);

    job.setMapOutputKeyClass(QuickDetect.class);
    job.setMapOutputValueClass(IntWritable.class);
    //job.setOutputFormatClass(YearMultipleTextOutputFormat.class);
    job2.setMapOutputKeyClass(DetectSrc.class);
    job2.setMapOutputValueClass(IntWritable.class);

    job3.setMapOutputKeyClass(DetectArpScan.class);
    job3.setMapOutputValueClass(IntWritable.class);

    job4.setMapOutputKeyClass(DetectMitm.class);
    job4.setMapOutputValueClass(IntWritable.class);

    job5.setMapOutputKeyClass(SMac.class);
    job5.setMapOutputValueClass(IntWritable.class);

    job6.setMapOutputKeyClass(DMac.class);
    job6.setMapOutputValueClass(IntWritable.class);

    job7.setMapOutputKeyClass(timeMac.class);
    job7.setMapOutputValueClass(IntWritable.class);

    FileInputFormat.addInputPath(job, new Path(in));
    FileOutputFormat.setOutputPath(job, new Path(out));
    if (job.waitForCompletion(true)) {
        FileInputFormat.addInputPath(job2, new Path(in));
        FileOutputFormat.setOutputPath(job2, new Path(out + "Src"));
        if (job2.waitForCompletion(true)) {
            FileInputFormat.addInputPath(job3, new Path(in));
            FileOutputFormat.setOutputPath(job3, new Path(out + "ArpScn"));
            if (job3.waitForCompletion(true)) {
                FileInputFormat.addInputPath(job4, new Path(in));
                FileOutputFormat.setOutputPath(job4, new Path(out + "Mitm"));
                if (job4.waitForCompletion(true)) {
                    FileInputFormat.addInputPath(job5, new Path(in));
                    FileOutputFormat.setOutputPath(job5, new Path(out + "s"));
                    if (job5.waitForCompletion(true)) {
                        FileInputFormat.addInputPath(job6, new Path(in));
                        FileOutputFormat.setOutputPath(job6, new Path(out + "d"));
                        if (job6.waitForCompletion(true)) {
                            FileInputFormat.addInputPath(job7, new Path(in));
                            FileOutputFormat.setOutputPath(job7, new Path(out + "t"));
                            job7.waitForCompletion(true);
                        } else {
                            return 1;
                        }
                    } else {
                        return 1;
                    }
                } else {
                    return 1;
                }
            } else {
                return 1;
            }
        } else {
            return 1;
        }
    } else {
        return 1;
    }
    return 0;
}

From source file:arpserver.serverUdp.java

public void uploadFile(String inputLocal, String inputServer) {
    inFile = new Path(inputLocal);
    outFile = new Path(inputServer);

    try {
        //fs.create(inFile);
        if (!fs.exists(inFile)) {
            fs.copyFromLocalFile(inFile, outFile);
            System.out.println("++++" + inputLocal + "->" + inputServer);
        }
    } catch (IOException ex) {
        Logger.getLogger(serverUdp.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:arpserver.serverUdp.java

public void downloadFile(String outputLocal, String outputServer) {
    inFile = new Path(outputLocal + "/part-r-00000");
    outFile = new Path(outputServer + "/test.txt");
    try {
        if (fs.exists(inFile)) {
            inFile = new Path(outputLocal + "/part-r-00000");
            outFile = new Path("/var/www/html/SI-ARP/data/TestTAFinal/" + outputServer + "/Q.txt");
            fs.moveToLocalFile(inFile, outFile);
            inFile = new Path(outputLocal + "ArpScn/part-r-00000");
            outFile = new Path("/var/www/html/SI-ARP/data/TestTAFinal/" + outputServer + "/ArpScn.txt");
            fs.moveToLocalFile(inFile, outFile);
            inFile = new Path(outputLocal + "Mitm/part-r-00000");
            outFile = new Path("/var/www/html/SI-ARP/data/TestTAFinal/" + outputServer + "/Mitm.txt");
            fs.moveToLocalFile(inFile, outFile);
            inFile = new Path(outputLocal + "Src/part-r-00000");
            outFile = new Path("/var/www/html/SI-ARP/data/TestTAFinal/" + outputServer + "/Src.txt");
            fs.moveToLocalFile(inFile, outFile);
            inFile = new Path(outputLocal + "s/part-r-00000");
            outFile = new Path("/var/www/html/SI-ARP/data/TestTAFinal/" + outputServer + "/s.txt");
            fs.moveToLocalFile(inFile, outFile);
            inFile = new Path(outputLocal + "d/part-r-00000");
            outFile = new Path("/var/www/html/SI-ARP/data/TestTAFinal/" + outputServer + "/d.txt");
            fs.moveToLocalFile(inFile, outFile);
            inFile = new Path(outputLocal + "t/part-r-00000");
            outFile = new Path("/var/www/html/SI-ARP/data/TestTAFinal/" + outputServer + "/t.txt");
            fs.moveToLocalFile(inFile, outFile);
        }
    } catch (IOException ex) {
        Logger.getLogger(serverUdp.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:arrestsbyyear.ArrestsByYear.java

public int run(String[] args) throws Exception {
    Configuration conf = getConf();

    JobConf job = new JobConf(conf, ArrestsByYear.class);

    Path in = new Path(args[0]);
    Path out = new Path(args[1]);
    FileInputFormat.setInputPaths(job, in);
    FileOutputFormat.setOutputPath(job, out);

    job.setJobName("ArrestsByYear");
    job.setMapperClass(MapClass.class);
    job.setReducerClass(Reduce.class);

    job.setInputFormat(KeyValueTextInputFormat.class);
    job.setOutputFormat(TextOutputFormat.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    //   job.set("key.value.separator.in.input.line", "");

    JobClient.runJob(job);

    return 0;
}

From source file:AshleyIngram.FYP.Hadoop.WordCount.java

License:Apache License

public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length != 2) {
        System.err.println("Usage: wordcount <in> <out>");
        System.exit(2);
    }
    Job job = new Job(conf, "word count");
    job.setJarByClass(WordCount.class);
    job.setMapperClass(TokenizerMapper.class);
    job.setCombinerClass(IntSumReducer.class);
    job.setReducerClass(IntSumReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
    FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file:assignment1.WordCount.LinkedSort.LinkedSort.java

License:Apache License

public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length < 2) {
        System.err.println("Usage: hadoop jar This.jar <in> [<in>...] <out>");
        System.exit(2);
    }
    Job job = new Job(conf, "word count");
    job.setJarByClass(LinkedSort.class);
    job.setMapperClass(TokenizerMapper.class);
    //job.setCombinerClass(IntSumReducer.class);
    job.setReducerClass(IntSumReducer.class);
    job.setPartitionerClass(SortPartitioner.class);
    job.setOutputKeyClass(WordAndLength.class);
    job.setOutputValueClass(IntWritable.class);

    job.setNumReduceTasks(2);
    for (int i = 0; i < otherArgs.length - 1; ++i) {
        FileInputFormat.addInputPath(job, new Path(otherArgs[i]));
    }
    FileOutputFormat.setOutputPath(job, new Path(otherArgs[otherArgs.length - 1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file:assignment1.WordCount.WordCountInMap.java

License:Apache License

public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length < 2) {
        System.err.println("Usage: hadoop jar This.jar <in> [<in>...] <out>");
        System.exit(2);
    }
    Job job = new Job(conf, "word count");
    job.setJarByClass(assignment1.WordCount.WordCountInMap.class);
    job.setMapperClass(TokenizerMapper.class);
    //job.setCombinerClass(IntSumReducer.class);
    job.setReducerClass(IntSumReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    for (int i = 0; i < otherArgs.length - 1; ++i) {
        FileInputFormat.addInputPath(job, new Path(otherArgs[i]));
    }
    FileOutputFormat.setOutputPath(job, new Path(otherArgs[otherArgs.length - 1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}