Example usage for org.apache.hadoop.fs FileSystem get

List of usage examples for org.apache.hadoop.fs FileSystem get

Introduction

On this page you can find example usage for org.apache.hadoop.fs FileSystem get.

Prototype

public static FileSystem get(Configuration conf) throws IOException 

Source Link

Document

Returns the configured FileSystem implementation.
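For orientation before the source-file examples below, here is a minimal, self-contained sketch of the call (not taken from the sources on this page; the path it probes is hypothetical and only illustrates the returned handle):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class FileSystemGetExample {
    public static void main(String[] args) throws IOException {
        // FileSystem.get(conf) resolves the file system named by fs.defaultFS
        // in the configuration (HDFS on a cluster, the local file system by default).
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);

        // Hypothetical path, for illustration only.
        Path path = new Path("/tmp/example.txt");
        System.out.println(path + " exists: " + fs.exists(path));
    }
}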

Usage

From source file:Analysis.A9_Max_Activity_By_Time_of_Day.Most_Listens_By_Time_of_Day_Driver.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "Most listens by Time of the Day");
    job.setJarByClass(Most_Listens_By_Time_of_Day_Driver.class);

    job.setMapperClass(Most_Listens_By_Time_of_Day_Mapper.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(NullWritable.class);

    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    int code = job.waitForCompletion(true) ? 0 : 1;

    if (code == 0) {
        for (Counter counter : job.getCounters()
                .getGroup(Most_Listens_By_Time_of_Day_Mapper.HOUR_COUNTER_GROUP)) {
            System.out.println(counter.getDisplayName() + "\t" + counter.getValue());
        }
    }

    FileSystem.get(conf).delete(new Path(args[1]), true);

    System.exit(code);
}

From source file:application.RecommenderEvaluator.java

License:Open Source License

/**
 *    The evaluate method takes as input a collection of items, each with an associated mean and standard error,
 *    a collection of users whose value is a collection of the means of the items they have voted on,
 *    and a string containing the path to a text file (#user,#item,#vote) used for testing. The method outputs a confusion matrix laid out as follows:
 *    cell [0][0] holds the number of times the vote was positive and the prediction was positive (True Positive)
 *    cell [0][1] holds the number of times the vote was positive and the prediction was negative (False Negative)
 *    cell [1][0] holds the number of times the vote was negative and the prediction was positive (False Positive)
 *    cell [1][1] holds the number of times the vote was negative and the prediction was negative (True Negative)
 *    @param Stato_Item a collection of items, each with an associated mean and standard error
 *    @param Stato_User a collection of users whose value is a collection of the means of the items the user has voted on
 *    @param s a string containing the path to a text file used for testing
 *    @return a confusion matrix with the counts of TruePositive, FalsePositive, FalseNegative, TrueNegative obtained.
 *    @throws IOException
 */
public void evaluate() {
    String test_file = GLOBALS.getTEST_FILE_NAME();
    String split = GLOBALS.getSPLIT_TOKEN();
    try {
        FileSystem fs = FileSystem.get(conf);
        BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(new Path(test_file))));
        String line;
        while ((line = br.readLine()) != null) {
            StringTokenizer st = new StringTokenizer(line, split);
            int user = Integer.parseInt(st.nextToken());
            int item = Integer.parseInt(st.nextToken());
            int vote = Integer.parseInt(st.nextToken());
            UserProfile UP = USER_STATE.get(user);
            ItemProfile IP = ITEM_STATE.get(item);
            if (UP == null || IP == null) {
                skipped_evaluation++;
            } else {

                Boolean prediction = estimatePreference(UP, IP);
                if (prediction == null) {
                    skipped_evaluation++;
                    continue;
                }

                //               System.out.println("prediction:"+prediction+", vote:"+vote);
                if (prediction) {
                    if (vote == 1) {
                        CM[0][0] += 1;
                    } //TP
                    else if (vote == -1) {
                        CM[1][0] += 1;
                    } //FP
                } else {
                    if (vote == -1) {
                        CM[1][1] += 1;
                    } //TN
                    else if (vote == 1) {
                        CM[0][1] += 1;
                    } //FN
                }
            }
        }
        br.close();

        //STAT
        //         System.out.println("TP: "+CM[0][0]);
        //         System.out.println("FN: "+CM[0][1]);
        //         System.out.println("FP: "+CM[1][0]);
        //         System.out.println("TN: "+CM[1][1]);
        //         System.out.println("SKYPPED: "+skipped_evaluation);
    } catch (Exception e) {
        System.out.println("Exception " + e);
        e.printStackTrace();
        System.exit(-1);
    }
}

From source file:ark.util.HadoopUtil.java

License:Apache License

public static BufferedReader getFileReader(String path) {
    try {
        Path filePath = new Path(path);
        FileSystem fileSystem = FileSystem.get(new Configuration());
        BufferedReader reader = new BufferedReader(new InputStreamReader(fileSystem.open(filePath)));
        return reader;
    } catch (Exception e) {
        return null;
    }
}

From source file:arpserver.HadoopTool.java

@Override
public int run(String[] strings) throws Exception {
    Configuration conf = new Configuration();
    String in = strings[0];
    String out = strings[1];
    FileSystem fs = FileSystem.get(conf);
    if (fs.exists(new Path(out))) {
        fs.delete(new Path(out), true);
        fs.delete(new Path(out + "Src"), true);
        fs.delete(new Path(out + "Mitm"), true);
        fs.delete(new Path(out + "ArpScn"), true);
        fs.delete(new Path(out + "s"), true);
        fs.delete(new Path(out + "d"), true);
        fs.delete(new Path(out + "t"), true);
    }
    Job job = new Job();
    Job job2 = new Job();
    Job job3 = new Job();
    Job job4 = new Job();
    Job job5 = new Job();
    Job job6 = new Job();
    Job job7 = new Job();
    job.setJobName("Q");
    job2.setJobName("Src");
    job3.setJobName("Mitm");
    job4.setJobName("ArpScn");
    job5.setJobName("s");
    job6.setJobName("d");
    job7.setJobName("time");
    job.setJarByClass(QuickDetect.class);

    job.setMapperClass(Qmapper.class);
    job.setReducerClass(Qreducer.class);

    job2.setMapperClass(Srcmapper.class);
    job2.setReducerClass(Srcreducer.class);

    job3.setMapperClass(ArpScanmapper.class);
    job3.setReducerClass(ArpScanreducer.class);

    job4.setMapperClass(Mitmmapper.class);
    job4.setReducerClass(Mitmreducer.class);

    job5.setMapperClass(Smapper.class);
    job5.setReducerClass(Sreducer.class);

    job6.setMapperClass(Dmapper.class);
    job6.setReducerClass(Dreducer.class);

    job7.setMapperClass(timemapper.class);
    job7.setReducerClass(timereducer.class);
    //testFinal168.txt
    job.setOutputKeyClass(NullWritable.class);
    job.setOutputValueClass(Text.class);

    job2.setOutputKeyClass(NullWritable.class);
    job2.setOutputValueClass(Text.class);

    job3.setOutputKeyClass(NullWritable.class);
    job3.setOutputValueClass(IntWritable.class);

    job4.setOutputKeyClass(NullWritable.class);
    job4.setOutputValueClass(Text.class);

    job5.setOutputKeyClass(NullWritable.class);
    job5.setOutputValueClass(Text.class);

    job6.setOutputKeyClass(NullWritable.class);
    job6.setOutputValueClass(Text.class);

    job7.setOutputKeyClass(NullWritable.class);
    job7.setOutputValueClass(Text.class);

    job.setMapOutputKeyClass(QuickDetect.class);
    job.setMapOutputValueClass(IntWritable.class);
    //job.setOutputFormatClass(YearMultipleTextOutputFormat.class);
    job2.setMapOutputKeyClass(DetectSrc.class);
    job2.setMapOutputValueClass(IntWritable.class);

    job3.setMapOutputKeyClass(DetectArpScan.class);
    job3.setMapOutputValueClass(IntWritable.class);

    job4.setMapOutputKeyClass(DetectMitm.class);
    job4.setMapOutputValueClass(IntWritable.class);

    job5.setMapOutputKeyClass(SMac.class);
    job5.setMapOutputValueClass(IntWritable.class);

    job6.setMapOutputKeyClass(DMac.class);
    job6.setMapOutputValueClass(IntWritable.class);

    job7.setMapOutputKeyClass(timeMac.class);
    job7.setMapOutputValueClass(IntWritable.class);

    FileInputFormat.addInputPath(job, new Path(in));
    FileOutputFormat.setOutputPath(job, new Path(out));
    if (job.waitForCompletion(true)) {
        FileInputFormat.addInputPath(job2, new Path(in));
        FileOutputFormat.setOutputPath(job2, new Path(out + "Src"));
        if (job2.waitForCompletion(true)) {
            FileInputFormat.addInputPath(job3, new Path(in));
            FileOutputFormat.setOutputPath(job3, new Path(out + "ArpScn"));
            if (job3.waitForCompletion(true)) {
                FileInputFormat.addInputPath(job4, new Path(in));
                FileOutputFormat.setOutputPath(job4, new Path(out + "Mitm"));
                if (job4.waitForCompletion(true)) {
                    FileInputFormat.addInputPath(job5, new Path(in));
                    FileOutputFormat.setOutputPath(job5, new Path(out + "s"));
                    if (job5.waitForCompletion(true)) {
                        FileInputFormat.addInputPath(job6, new Path(in));
                        FileOutputFormat.setOutputPath(job6, new Path(out + "d"));
                        if (job6.waitForCompletion(true)) {
                            FileInputFormat.addInputPath(job7, new Path(in));
                            FileOutputFormat.setOutputPath(job7, new Path(out + "t"));
                            job7.waitForCompletion(true);
                        } else {
                            return 1;
                        }
                    } else {
                        return 1;
                    }
                } else {
                    return 1;
                }
            } else {
                return 1;
            }
        } else {
            return 1;
        }
    } else {
        return 1;
    }
    return 0;
}

From source file:arpserver.serverUdp.java

public serverUdp(databasesArp db) {
    this.db = db;
    value = this.db.selectSetting();
    interval = value[1];
    name = value[2];
    path = value[3];
    hadooppath = value[4];
    hdfspath = value[5];
    ip = value[6];
    port = Integer.parseInt(value[7]);
    last = this.db.getMaxData();
    dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm");
    startHadoop();
    try {
        serverSocket = new DatagramSocket(port);
    } catch (SocketException ex) {
        Logger.getLogger(serverUdp.class.getName()).log(Level.SEVERE, null, ex);
    }
    try {
        fs = FileSystem.get(conf);
    } catch (IOException ex) {
        Logger.getLogger(serverUdp.class.getName()).log(Level.SEVERE, null, ex);
    }
    receiveData = new byte[150];
    sendData = new byte[150];
    lock = new ReentrantLock();
    cf = new Thread(new Runnable() {

        @Override
        public void run() {
            getTimeI();
            getTime();
            while (true) {
                //System.out.println("++++"+time +" "+ timeI);
                if (time > timeI) {
                    System.out.println("++++" + time + " " + timeI);
                    //try {
                    //    fw.close();
                    //} catch (IOException ex) {
                    //    Logger.getLogger(serverUdp.class.getName()).log(Level.SEVERE, null, ex);
                    //}
                    last++;
                    getTimeI();
                    getTime();
                    System.out.println("save");
                    inputData = true;
                    x.setNetwork(statusinput);
                    x.setDataBases(db.getStatus());
                    createFile();
                }
                getTime();
            }

        }
    });
}

From source file:Assignment3_P2_MergeStockAverageCount.StockPriceMergeDriver.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = new Configuration();

    // local file system handle
    FileSystem local = FileSystem.getLocal(conf);

    // hdfs file system handle
    FileSystem hdfs = FileSystem.get(conf);

    // local input directory
    Path inputDir = new Path(args[0]);

    // hdfs destination file (holds the merged data, used as job input below)
    Path inputDir1 = new Path(args[1]);

    // local input files in local dir
    FileStatus[] inputFiles = local.listStatus(inputDir);

    // o/p stream
    FSDataOutputStream out = hdfs.create(inputDir1);

    // open each file and extract contents of file
    for (int i = 0; i < inputFiles.length; i++) {
        System.out.println("File name ----------------------------------------------------------------> "
                + inputFiles[i].getPath().getName());
        FSDataInputStream in = local.open(inputFiles[i].getPath());
        byte buffer[] = new byte[256];
        int bytesRead = 0;

        // extract all contents of file
        while ((bytesRead = in.read(buffer)) > 0) {
            out.write(buffer, 0, bytesRead);
        }

        // close input stream
        in.close();
    }

    // close the merged output stream so all data is flushed to HDFS
    out.close();

    Job job = Job.getInstance(conf, "Average Stock Price");
    job.setJarByClass(StockPriceMergeDriver.class);
    job.setMapperClass(StockPriceMerge_Mapper.class);
    job.setCombinerClass(StockPriceMerge_Reducer.class);
    job.setReducerClass(StockPriceMerge_Reducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(FloatWritable.class);
    FileInputFormat.addInputPath(job, new Path(args[1])); // above programs output will be input for mapper
    FileOutputFormat.setOutputPath(job, new Path(args[2]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file:at.illecker.hadoop.rootbeer.examples.matrixmultiplication.DistributedRowMatrix.java

License:Apache License

public void setOutputTempPathString(String outPathString) {
    try {
        outputTmpBasePath = FileSystem.get(conf).makeQualified(new Path(outPathString));
    } catch (IOException ioe) {
        log.error("Unable to set outputBasePath to {}, leaving as {}", outPathString, outputTmpBasePath);
    }
}

From source file:at.illecker.hadoop.rootbeer.examples.matrixmultiplication.DistributedRowMatrix.java

License:Apache License

@Override
public Iterator<MatrixSlice> iterateAll() {
    try {
        Path pathPattern = rowPath;
        if (FileSystem.get(conf).getFileStatus(rowPath).isDir()) {
            pathPattern = new Path(rowPath, "*");
        }
        return Iterators.transform(
                new SequenceFileDirIterator<IntWritable, VectorWritable>(pathPattern, PathType.GLOB,
                        PathFilters.logsCRCFilter(), null, true, conf),
                new Function<Pair<IntWritable, VectorWritable>, MatrixSlice>() {
                    @Override
                    public MatrixSlice apply(Pair<IntWritable, VectorWritable> from) {
                        return new MatrixSlice(from.getSecond().get(), from.getFirst().get());
                    }
                });
    } catch (IOException ioe) {
        throw new IllegalStateException(ioe);
    }
}

From source file:at.illecker.hadoop.rootbeer.examples.matrixmultiplication.DistributedRowMatrix.java

License:Apache License

public static void writeDistributedRowMatrix(Configuration conf, double[][] matrix, int rows, int columns,
        Path path, boolean saveTransposed) throws Exception {

    SequenceFile.Writer writer = null;
    try {
        FileSystem fs = FileSystem.get(conf);
        writer = new SequenceFile.Writer(fs, conf, path, IntWritable.class, VectorWritable.class);

        if (saveTransposed) { // Transpose Matrix before saving
            double[][] transposed = new double[columns][rows];
            for (int i = 0; i < rows; i++) {
                for (int j = 0; j < columns; j++) {
                    transposed[j][i] = matrix[i][j];
                }
            }
            matrix = transposed;
        }

        for (int i = 0; i < matrix.length; i++) {
            DenseVector rowVector = new DenseVector(matrix[i]);
            writer.append(new IntWritable(i), new VectorWritable(rowVector));
        }

    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        if (writer != null) {
            try {
                writer.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}

From source file:at.illecker.hadoop.rootbeer.examples.matrixmultiplication.MatrixMultiplicationBenchmark.java

License:Apache License

@Override
protected void setUp() throws Exception {
    m_conf = new Configuration();

    // Try to load Hadoop configuration
    String HADOOP_HOME = System.getenv("HADOOP_HOME");
    String HADOOP_INSTALL = System.getenv("HADOOP_INSTALL");

    if (((HADOOP_HOME != null) || (HADOOP_INSTALL != null)) && !m_runLocally) {
        String HADOOP = ((HADOOP_HOME != null) ? HADOOP_HOME : HADOOP_INSTALL);

        m_conf.addResource(new Path(HADOOP, "src/core/core-default.xml"));
        m_conf.addResource(new Path(HADOOP, "src/hdfs/hdfs-default.xml"));
        m_conf.addResource(new Path(HADOOP, "src/mapred/mapred-default.xml"));
        m_conf.addResource(new Path(HADOOP, "conf/core-site.xml"));
        m_conf.addResource(new Path(HADOOP, "conf/hdfs-site.xml"));
        m_conf.addResource(new Path(HADOOP, "conf/mapred-site.xml"));
        // System.out.println("Loaded Hadoop configuration from " + HADOOP);

        try {
            // Connect to HDFS Filesystem
            FileSystem.get(m_conf);
        } catch (Exception e) {
            // HDFS not reachable run Benchmark locally
            m_conf = new Configuration();
            m_runLocally = true;
        }
    }

    // Create random DistributedRowMatrix and write out transposed
    DistributedRowMatrix.createRandomDistributedRowMatrix(m_conf, n, n, new Random(42L),
            m_transposedMatrixAPath, true);
    DistributedRowMatrix.createRandomDistributedRowMatrix(m_conf, n, n, new Random(), m_matrixBPath, false);

    // Load DistributedRowMatrix A and B
    m_transposedMatrixA = new DistributedRowMatrix(m_transposedMatrixAPath, CONF_INPUT_DIR, n, n);
    m_transposedMatrixA.setConf(m_conf);

    m_matrixB = new DistributedRowMatrix(m_matrixBPath, CONF_INPUT_DIR, n, n);
    m_matrixB.setConf(m_conf);

    // Debug output
    System.out.println("CONF_TMP_DIR: " + CONF_TMP_DIR.toString());
    System.out.println("Benchmark " + n + " x " + n + " matrix on " + type);
}