Example usage for org.apache.hadoop.fs FSDataInputStream read

List of usage examples for org.apache.hadoop.fs FSDataInputStream read

Introduction

On this page you can find example usages of org.apache.hadoop.fs.FSDataInputStream.read.

Prototype

@Override
public int read(ByteBuffer buf) throws IOException
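
Before the project-specific examples below, here is a minimal sketch of the most common pattern: looping on read(byte[]) until it returns -1. The method name and parameters here are illustrative, not taken from any of the sources listed.

public static void copyToLocal(FileSystem fs, Path src, OutputStream out) throws IOException {
    // assumes the usual org.apache.hadoop.fs and java.io imports
    try (FSDataInputStream in = fs.open(src)) {
        byte[] buffer = new byte[4096];
        int bytesRead;
        // read(byte[]) returns the number of bytes placed in the buffer, or -1 at end of stream
        while ((bytesRead = in.read(buffer)) != -1) {
            out.write(buffer, 0, bytesRead);
        }
    }
}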


Usage

From source file:com.rockstor.core.io.ChunkReader.java

License:Apache License

public static Chunk readIndex(FSDataInputStream input) throws IllegalArgumentException, IOException {
    Chunk chunk = new Chunk();

    chunk.setOffset(input.readLong());

    byte[] rockID = new byte[Rock.ROCK_ID_LEN];
    if (Rock.ROCK_ID_LEN != input.read(rockID)) {
        throw new IOException("read rock magic failed!");
    }

    chunk.setRockID(rockID);

    byte[] chunkPrefix = new byte[Chunk.CHUNK_ID_LEN];
    if (Chunk.CHUNK_ID_LEN != input.read(chunkPrefix)) {
        throw new IOException("read chunk ID failed!");
    }

    chunk.setChunkPrefix(chunkPrefix);
    chunk.setPartID(input.readShort());
    chunk.setSeqID(input.readShort());
    chunk.setTimestamp(input.readLong());
    chunk.setSize(input.readLong());

    return chunk;
}
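
Note that read(byte[]) is not guaranteed to fill the buffer in a single call, so the length checks above can reject a well-formed file. A hedged alternative (not part of the original source) is readFully, which FSDataInputStream inherits from DataInputStream and which either fills the array or throws EOFException:

    byte[] rockID = new byte[Rock.ROCK_ID_LEN];
    input.readFully(rockID); // fills the array completely or throws EOFException
    chunk.setRockID(rockID);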

From source file:edu.arizona.cs.hadoop.fs.irods.output.HirodsFileOutputCommitter.java

License:Apache License

private void moveTaskOutputsToIRODS(TaskAttemptContext context, FileSystem outfs, Path outDir,
        FileSystem workfs, Path workOutput) throws IOException {
    context.progress();
    if (workfs.isFile(workOutput)) {
        Path finalOutputPath = getFinalPath(outDir, workOutput, this.workPath);
        FSDataOutputStream irods_os = null;
        FSDataInputStream temp_is = null;
        try {
            // commit to iRODS
            irods_os = outfs.create(finalOutputPath, true);
            temp_is = workfs.open(workOutput);

            byte[] buffer = new byte[100 * 1024];
            int bytes_read = 0;

            while ((bytes_read = temp_is.read(buffer)) != -1) {
                irods_os.write(buffer, 0, bytes_read);
            }
        } finally {
            if (temp_is != null) {
                try {
                    temp_is.close();
                } catch (IOException ex) {
                    // ignore exceptions
                }
            }

            // remove temporary file
            try {
                workfs.delete(workOutput, true);
            } catch (IOException ex) {
                // ignore exceptions
            }

            if (irods_os != null) {
                irods_os.close();
            }
        }

        LOG.debug("Moved " + workOutput + " to " + finalOutputPath);
    } else if (workfs.getFileStatus(workOutput).isDir()) {
        FileStatus[] paths = workfs.listStatus(workOutput);
        Path finalOutputPath = getFinalPath(outDir, workOutput, this.workPath);
        outfs.mkdirs(finalOutputPath);
        if (paths != null) {
            for (FileStatus path : paths) {
                moveTaskOutputsToIRODS(context, outfs, outDir, workfs, path.getPath());
            }
        }
    }
}
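
The manual buffer loop above can also be written with Hadoop's org.apache.hadoop.io.IOUtils. A rough equivalent, sketched here rather than taken from the project, would be:

    irods_os = outfs.create(finalOutputPath, true);
    temp_is = workfs.open(workOutput);
    // copies until EOF using a 100 KB buffer; 'false' leaves the streams open for the finally block
    IOUtils.copyBytes(temp_is, irods_os, 100 * 1024, false);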

From source file:edu.umn.cs.spatialHadoop.indexing.RTree.java

License:Open Source License

/**
 * A main method that creates a single R-tree out of a single file.
 * @param args
 * @throws IOException 
 */
public static void main(String[] args) throws IOException {
    final OperationsParams params = new OperationsParams(new GenericOptionsParser(args));
    if (!params.checkInputOutput())
        throw new RuntimeException("Input-output combination not correct");
    Path inPath = params.getInputPath();
    Path outPath = params.getOutputPath();
    Shape shape = params.getShape("shape");

    // Read the whole input file as one byte array
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    byte[] buffer = new byte[1024 * 1024];
    FileSystem inFS = inPath.getFileSystem(params);
    FSDataInputStream in = inFS.open(inPath);
    int bytesRead;
    while ((bytesRead = in.read(buffer)) >= 0) {
        baos.write(buffer, 0, bytesRead);
    }
    in.close();
    baos.close();

    // Create the R-tree and write to output
    byte[] inputData = baos.toByteArray();
    FileSystem outFS = outPath.getFileSystem(params);
    FSDataOutputStream out = outFS.create(outPath);
    out.write(SpatialSite.RTreeFileMarkerB);
    RTree.bulkLoadWrite(inputData, 0, inputData.length, 4, out, shape, true);
    out.close();
}
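
Since the input file's length is known from the FileSystem, the whole-file read could also use the positioned readFully that FSDataInputStream provides via PositionedReadable. This is a sketch under that assumption, not the project's code:

    FileSystem inFS = inPath.getFileSystem(params);
    long len = inFS.getFileStatus(inPath).getLen();
    byte[] inputData = new byte[(int) len]; // assumes the file fits in a single array
    try (FSDataInputStream in = inFS.open(inPath)) {
        in.readFully(0, inputData); // reads exactly len bytes starting at offset 0
    }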

From source file:edu.umn.cs.spatialHadoop.visualization.HadoopvizServer.java

License:Open Source License

/**
 * This method will handle each time a file need to be fetched from HDFS.
 *
 * @param request
 * @param response
 */
private void handleHDFSFetch(HttpServletRequest request, HttpServletResponse response) {
    try {
        String path = request.getRequestURI().replace("/hdfs", "");
        Path filePath = new Path(path);
        FileSystem fs = filePath.getFileSystem(commonParams);

        LOG.info("Fetching from " + path);

        FSDataInputStream resource;

        resource = fs.open(filePath);

        if (resource == null) {
            reportError(response, "Cannot load resource '" + filePath + "'", null);
            return;
        }
        byte[] buffer = new byte[1024 * 1024];
        ServletOutputStream outResponse = response.getOutputStream();
        int size;
        while ((size = resource.read(buffer)) != -1) {
            outResponse.write(buffer, 0, size);
        }
        resource.close();
        outResponse.close();
        response.setStatus(HttpServletResponse.SC_OK);
        if (filePath.toString().endsWith("png")) {
            response.setContentType("image/png");
        }
    } catch (Exception e) {
        System.out.println("error happened");
        try {
            e.printStackTrace(response.getWriter());
        } catch (IOException ioe) {
            ioe.printStackTrace();
            e.printStackTrace();
        }
        response.setContentType("text/plain;charset=utf-8");
        response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
    }
}
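
One caveat in the snippet above: the status code and Content-Type are set after the output stream has been written and closed, by which point the response is usually already committed and the headers are ignored. A hedged reordering (not the project's code) sets them before streaming:

    response.setStatus(HttpServletResponse.SC_OK);
    if (filePath.toString().endsWith("png")) {
        response.setContentType("image/png");
    }
    ServletOutputStream outResponse = response.getOutputStream();
    int size;
    while ((size = resource.read(buffer)) != -1) {
        outResponse.write(buffer, 0, size);
    }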

From source file:fire.util.fileformats.combineimagefileinputformat.CombineFileImageRecordReader.java

License:Apache License

public boolean nextKeyValue() throws IOException {
    if (numRecordsRead > 0)
        return false;

    if (key == null) {
        key = new Text(path.getName());
    }
    if (value == null) {
        value = new BytesWritable();
    }

    //String uri = key.toString();
    //Configuration conf = new Configuration();
    //FileSystem fs = FileSystem.get(URI.create(uri), conf);
    FSDataInputStream in = null;
    try {

        /***
        in = fs.open(path);
        java.io.ByteArrayOutputStream bout = new ByteArrayOutputStream();
        byte buffer[] = new byte[1024 * 1024];
                
        while( in.read(buffer, 0, buffer.length) >= 0 ) {
        bout.write(buffer);
        }
         ***/

        in = fs.open(path);
        byte buffer[] = new byte[in.available()];
        in.read(buffer);

        value = new BytesWritable(buffer);
    } finally {
        IOUtils.closeStream(in);
    }

    numRecordsRead++;

    return true;
}
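
Two caveats with the read above: InputStream.available() is only an estimate (for HDFS it reflects buffered data, not the file length), and a single read(buffer) call may return before the array is full. A more robust sketch, assumed here rather than taken from the project, sizes the buffer from the file status and uses readFully:

    in = fs.open(path);
    long len = fs.getFileStatus(path).getLen();
    byte[] buffer = new byte[(int) len]; // assumes the image fits in memory
    in.readFully(buffer);                // fills the array completely or throws EOFException
    value = new BytesWritable(buffer);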

From source file:FormatStorage.Head.java

License:Open Source License

public void unpersistent(FSDataInputStream in) throws Exception {
    ver = in.readShort();
    var = in.readByte();
    compress = in.readByte();
    compressStyle = in.readByte();
    primaryIndex = in.readShort();
    encode = in.readByte();
    encodeStyle = in.readByte();

    short keyLen = in.readShort();
    if (keyLen != 0) {
        byte[] keyBytes = new byte[keyLen];
        in.read(keyBytes);

        key = new String(keyBytes);
    }

    if (fieldMap == null) {
        fieldMap = new FieldMap();
    }

    fieldMap.unpersistent(in);

}

From source file:io.hops.hopsworks.apiV2.projects.BlobsResource.java

License:Open Source License

private StreamingOutput buildOutputStream(final FSDataInputStream stream,
        final DistributedFileSystemOps udfso) {
    return (OutputStream out) -> {
        try {
            int length;
            byte[] buffer = new byte[1024];
            while ((length = stream.read(buffer)) != -1) {
                out.write(buffer, 0, length);
            }
            out.flush();
            stream.close();
        } finally {
            dfs.closeDfsClient(udfso);
        }
    };
}
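
If the write loop throws, the stream above is never closed, because close() only runs after the loop completes. A try-with-resources variant (a sketch with otherwise unchanged behavior) keeps the cleanup guaranteed:

    return (OutputStream out) -> {
        try (FSDataInputStream in = stream) {
            int length;
            byte[] buffer = new byte[1024];
            while ((length = in.read(buffer)) != -1) {
                out.write(buffer, 0, length);
            }
            out.flush();
        } finally {
            dfs.closeDfsClient(udfso);
        }
    };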

From source file:ir.ac.ut.snl.mrcd.StageFour.java

public int run(String[] args)
        throws IOException, URISyntaxException, InterruptedException, ClassNotFoundException {
    System.out.println("enter fuckin run");
    Job job = new Job();
    String input = args[0];
    String output = args[1];
    FileInputFormat.addInputPath(job, new Path(input));
    FileOutputFormat.setOutputPath(job, new Path(output));
    job.setJarByClass(StageFour.class);
    job.setJobName("Stage four");
    job.setMapperClass(StageFourMapper.class);
    job.setReducerClass(StageFourReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);
    job.setInputFormatClass(KeyValueTextInputFormat.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(Edge.class);

    Path inFile = new Path("/home/arian/NetBeansProjects/bscthesis2/topkedgebetweenness");
    Path outFile = new Path("/home/arian/myhadoop/NetBeansProjects/bscthesis2/topkedgebetweenness");
    //        fs = FileSystem.get(new Configuration());
    fs = FileSystem.get(configuration);
    FSDataInputStream in = fs.open(inFile);
    FSDataOutputStream out = fs.create(outFile);
    System.out.println("fs is ok");
    int bytesRead = 0;
    byte buffer[] = new byte[256];
    while ((bytesRead = in.read(buffer)) > 0) {
        out.write(buffer, 0, bytesRead);
    }
    in.close();
    out.close();

    System.out.println("copy is ok");

    DistributedCache.addCacheFile(new URI("/home/arian/myhadoop/NetBeansProjects/bscthesis2/input3-converted"),
            configuration);
    DistributedCache.addCacheFile(
            new URI("/home/arian/myhadoop/NetBeansProjects/bscthesis2/topkedgebetweenness"), configuration);
    //        URI[] localCacheFilesx = DistributedCache.getCacheFiles(configuration);
    //        if (localCacheFilesx == null) {
    //            System.out.println("NULLE BI PEDARsssssssss");
    //        }
    //        if (localCacheFilesx != null) {
    //            System.out.println("There's something in the cache now.");
    //        }

    System.out.println("salam lllaaa");

    //        bufferedReader = null;
    //        bufferedReader2 = null;
    //        scanner = null;
    //        scanner2 = null;
    localCacheFiles = DistributedCache.getCacheFiles(configuration);
    if (localCacheFiles == null) {
        System.out.println("NULLE");
    }
    if (localCacheFiles != null) {
        System.out.println("There's something in the cache. an    " + localCacheFiles[1].toString());
        //                fileReader = new FileReader(localCacheFiles[1].toString());
        fs = FileSystem.get(configuration);
        in = fs.open(new Path(localCacheFiles[1].toString()));
        bufferedReader = new BufferedReader(new InputStreamReader(in));
        scanner = new Scanner(bufferedReader);
        if (!scanner.hasNextLine())
            System.out.println("ay ay AY AY SCANNER nextline nadare!!!!!!!!!!!!");
    }

    //        System.out.println("ssssalam sssssxxxxxx23ssa     " + scanner.nextLine());
    job.waitForCompletion(true);
    return 0;
}

From source file:ir.ac.ut.snl.mrcd.StageThree.java

public int run(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    Job job = new Job();
    String input = args[0];
    String output = args[1];
    FileInputFormat.addInputPath(job, new Path(input));
    FileOutputFormat.setOutputPath(job, new Path(output));
    job.setJarByClass(StageThree.class);
    job.setJobName("Stage three");
    job.setMapperClass(StageThreeMapper.class);
    job.setReducerClass(StageThreeReducer.class);
    //        job.setOutputKeyClass(Text.class);
    //        job.setOutputValueClass(DoubleWritable.class);
    job.setOutputKeyClass(DoubleWritable.class);
    job.setOutputValueClass(Text.class);

    job.setInputFormatClass(KeyValueTextInputFormat.class);
    job.setSortComparatorClass(SortDoubleComparator.class);

    job.waitForCompletion(true);

    Scanner scanner = null;
    try {
        File file = new File("/home/arian/NetBeansProjects/bscthesis2/output/stagethree/part-r-00000");
        FileReader fileReader = new FileReader(file);
        BufferedReader bufferedReader = new BufferedReader(fileReader);
        scanner = new Scanner(bufferedReader);
    } catch (Exception e) {
        System.out.println("NA NASHOD NASHOD NASHOD FILE BAZ NASHOD");
        e.printStackTrace();
    }

    PrintWriter printWriter = new PrintWriter("/home/arian/NetBeansProjects/bscthesis2/topkedgebetweenness",
            "UTF-8");

    int k = 4;
    for (int i = 0; i < k; i++) {
        printWriter.write(scanner.nextLine());
        //            if (i != k - 1)
        printWriter.write('\n');
    }
    printWriter.close();
    scanner.close();

    Path inFile = new Path("/home/arian/NetBeansProjects/bscthesis2/topkedgebetweenness");
    Path outFile = new Path("/home/arian/myhadoop/NetBeansProjects/bscthesis2/topkedgebetweenness");
    FileSystem fs = FileSystem.get(new Configuration());
    FSDataInputStream in = fs.open(inFile);
    FSDataOutputStream out = fs.create(outFile);

    int bytesRead = 0;
    byte buffer[] = new byte[256];
    while ((bytesRead = in.read(buffer)) > 0) {
        out.write(buffer, 0, bytesRead);
    }
    in.close();
    out.close();

    return 0;
}
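
The file copy performed in both jobs above (read into a 256-byte buffer, write, repeat) can also be expressed with org.apache.hadoop.fs.FileUtil. A hedged one-line equivalent, not taken from the project:

    // copies inFile to outFile on the same FileSystem; 'false' keeps the source file
    FileUtil.copy(fs, inFile, fs, outFile, false, new Configuration());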

From source file:org.apache.ambari.view.filebrowser.DownloadService.java

License:Apache License

private void zipFile(ZipOutputStream zip, String path) {
    try {
        FSDataInputStream in = getApi(context).open(path);
        zip.putNextEntry(new ZipEntry(path.substring(1)));
        byte[] chunk = new byte[1024];
        while (in.read(chunk) != -1) {
            zip.write(chunk);
        }
    } catch (IOException ex) {
        logger.error("Error zipping file " + path.substring(1) + " (file ignored): " + ex.getMessage());
    } catch (InterruptedException ex) {
        String msg = "Error zipping file " + path.substring(1) + " (file ignored): " + ex.getMessage();
        logger.error(msg);
    } finally {
        try {
            zip.closeEntry();
        } catch (IOException ex) {
            logger.error("Error closing entry " + path.substring(1) + " (file ignored): " + ex.getMessage());
        }
    }
}
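
Note that the loop above writes the entire chunk array on every pass, even when read returned fewer than 1024 bytes, so the final write can append stale data to the zip entry. A hedged correction (not the view's code) keeps only the bytes actually read:

    byte[] chunk = new byte[1024];
    int bytesRead;
    while ((bytesRead = in.read(chunk)) != -1) {
        zip.write(chunk, 0, bytesRead);
    }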