Example usage for java.io DataInputStream close

List of usage examples for java.io DataInputStream close

Introduction

On this page you can find example usages of java.io DataInputStream close.

Prototype

public void close() throws IOException 

Document

Closes this input stream and releases any system resources associated with the stream.
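
Before the project examples below, here is a minimal, self-contained sketch of the two closing styles those examples use: an explicit close() in a finally block, and the equivalent try-with-resources form that closes the stream automatically. The file name "data.bin" is only an illustrative placeholder.

import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;

public class DataInputStreamCloseSketch {
    public static void main(String[] args) throws IOException {
        // Style 1: explicit close() in a finally block, as in most examples below.
        DataInputStream in = null;
        try {
            in = new DataInputStream(new BufferedInputStream(new FileInputStream("data.bin")));
            System.out.println("first int = " + in.readInt());
        } finally {
            if (in != null) {
                in.close(); // releases the file descriptor held by the underlying FileInputStream
            }
        }

        // Style 2: try-with-resources; close() is invoked automatically,
        // even if readInt() throws an IOException.
        try (DataInputStream in2 = new DataInputStream(
                new BufferedInputStream(new FileInputStream("data.bin")))) {
            System.out.println("first int = " + in2.readInt());
        }
    }
}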

Usage

From source file:org.apache.flink.runtime.io.disk.iomanager.IOManagerPerformanceBenchmark.java

private final void speedTestStream(int bufferSize) throws IOException {
    final Channel.ID tmpChannel = ioManager.createChannel();
    final IntegerRecord rec = new IntegerRecord(0);

    File tempFile = null;
    DataOutputStream daos = null;
    DataInputStream dais = null;

    try {
        tempFile = new File(tmpChannel.getPath());

        FileOutputStream fos = new FileOutputStream(tempFile);
        daos = new DataOutputStream(new BufferedOutputStream(fos, bufferSize));

        long writeStart = System.currentTimeMillis();

        int valsLeft = NUM_INTS_WRITTEN;
        while (valsLeft-- > 0) {
            rec.setValue(valsLeft);
            rec.write(new OutputViewDataOutputStreamWrapper(daos));
        }
        daos.close();
        daos = null;

        long writeElapsed = System.currentTimeMillis() - writeStart;

        // ----------------------------------------------------------------

        FileInputStream fis = new FileInputStream(tempFile);
        dais = new DataInputStream(new BufferedInputStream(fis, bufferSize));

        long readStart = System.currentTimeMillis();

        valsLeft = NUM_INTS_WRITTEN;
        while (valsLeft-- > 0) {
            rec.read(new InputViewDataInputStreamWrapper(dais));
        }
        dais.close();
        dais = null;

        long readElapsed = System.currentTimeMillis() - readStart;

        LOG.info("File-Stream with buffer " + bufferSize + ": write " + writeElapsed + " msecs, read "
                + readElapsed + " msecs.");
    } finally {
        // close if possible
        if (daos != null) {
            daos.close();
        }
        if (dais != null) {
            dais.close();
        }
        // try to delete the file
        if (tempFile != null) {
            tempFile.delete();
        }
    }
}

From source file:org.apache.hadoop.fs.slive.ReadOp.java

@Override // Operation
List<OperationOutput> run(FileSystem fs) {
    List<OperationOutput> out = super.run(fs);
    DataInputStream is = null;
    try {
        Path fn = getReadFile();
        Range<Long> readSizeRange = getConfig().getReadSize();
        long readSize = 0;
        String readStrAm = "";
        if (getConfig().shouldReadFullFile()) {
            readSize = Long.MAX_VALUE;
            readStrAm = "full file";
        } else {
            readSize = Range.betweenPositive(getRandom(), readSizeRange);
            readStrAm = Helper.toByteInfo(readSize);
        }
        long timeTaken = 0;
        long chunkSame = 0;
        long chunkDiff = 0;
        long bytesRead = 0;
        long startTime = 0;
        DataVerifier vf = new DataVerifier();
        LOG.info("Attempting to read file at " + fn + " of size (" + readStrAm + ")");
        {
            // open
            startTime = Timer.now();
            is = fs.open(fn);
            timeTaken += Timer.elapsed(startTime);
            // read & verify
            VerifyOutput vo = vf.verifyFile(readSize, is);
            timeTaken += vo.getReadTime();
            chunkSame += vo.getChunksSame();
            chunkDiff += vo.getChunksDifferent();
            bytesRead += vo.getBytesRead();
            // capture close time
            startTime = Timer.now();
            is.close();
            is = null;
            timeTaken += Timer.elapsed(startTime);
        }
        out.add(new OperationOutput(OutputType.LONG, getType(), ReportWriter.OK_TIME_TAKEN, timeTaken));
        out.add(new OperationOutput(OutputType.LONG, getType(), ReportWriter.BYTES_READ, bytesRead));
        out.add(new OperationOutput(OutputType.LONG, getType(), ReportWriter.SUCCESSES, 1L));
        out.add(new OperationOutput(OutputType.LONG, getType(), ReportWriter.CHUNKS_VERIFIED, chunkSame));
        out.add(new OperationOutput(OutputType.LONG, getType(), ReportWriter.CHUNKS_UNVERIFIED, chunkDiff));
        LOG.info("Read " + Helper.toByteInfo(bytesRead) + " of " + fn + " with " + chunkSame
                + " chunks being same as expected and " + chunkDiff
                + " chunks being different than expected in " + timeTaken + " milliseconds");

    } catch (FileNotFoundException e) {
        out.add(new OperationOutput(OutputType.LONG, getType(), ReportWriter.NOT_FOUND, 1L));
        LOG.warn("Error with reading", e);
    } catch (BadFileException e) {
        out.add(new OperationOutput(OutputType.LONG, getType(), ReportWriter.BAD_FILES, 1L));
        LOG.warn("Error reading bad file", e);
    } catch (IOException e) {
        out.add(new OperationOutput(OutputType.LONG, getType(), ReportWriter.FAILURES, 1L));
        LOG.warn("Error reading", e);
    } finally {
        if (is != null) {
            try {
                is.close();
            } catch (IOException e) {
                LOG.warn("Error closing read stream", e);
            }
        }
    }
    return out;
}

From source file:SoundManagerTest.java

/**
 * Loads a Sound from an AudioInputStream.
 */
public Sound getSound(AudioInputStream audioStream) {
    if (audioStream == null) {
        return null;
    }

    // get the number of bytes to read
    int length = (int) (audioStream.getFrameLength() * audioStream.getFormat().getFrameSize());

    // read the entire stream
    byte[] samples = new byte[length];
    DataInputStream is = new DataInputStream(audioStream);
    try {
        is.readFully(samples);
        is.close();
    } catch (IOException ex) {
        ex.printStackTrace();
    }

    // return the samples
    return new Sound(samples);
}

From source file:eu.cloud4soa.tests.TestReqSec_Deploy.java

private ApplicationInstance createApplicationInstance(String applicationName) throws FileNotFoundException {
    URL fileURL = this.getClass().getClassLoader().getResource("SimpleWar.war");
    if (fileURL == null)
        throw new FileNotFoundException("SimpleWar.war");

    ByteArrayOutputStream bas = new ByteArrayOutputStream();

    File file = new File(fileURL.getPath());
    file.length();
    FileInputStream fis = new FileInputStream(file);
    BufferedInputStream bis = new BufferedInputStream(fis);
    DataInputStream dis = new DataInputStream(bis);

    //Calculate digest from InputStream
    //        InputStream tempIs = new FileInputStream(file);
    String tempFileDigest = null;
    try {
        FileInputStream tempFis = new FileInputStream(file);
        tempFileDigest = DigestUtils.sha256Hex(tempFis);
    } catch (IOException ex) {
        Logger.getLogger(ApplicationDeploymentTest.class.getName()).log(Level.SEVERE, null, ex);
    }
    ApplicationInstance applicationInstance = null;
    applicationInstance = new ApplicationInstance();
    applicationInstance.setAcronym(applicationName);
    applicationInstance.setApplicationcode(applicationName);
    applicationInstance.setDigest(tempFileDigest);
    applicationInstance.setProgramminglanguage("Java");
    applicationInstance.setProgramminglanguageVersion("1.6");
    applicationInstance.setSizeQuantity(new Float(file.length()));
    applicationInstance.setVersion("1.0");
    applicationInstance.setArchiveFileName("SimpleWar");
    applicationInstance.setArchiveExtensionName(".war");

    try {
        fis.close();
        bis.close();
        dis.close();
    } catch (IOException ex) {
        Logger.getLogger(ApplicationDeploymentTest.class.getName()).log(Level.SEVERE, null, ex);
    }

    return applicationInstance;
}

From source file:br.org.indt.ndg.servlets.OpenRosaManagement.java

protected void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    m_openRosaBD.setPortAndAddress(SystemProperties.getServerAddress());

    String action = request.getParameter(ACTION_PARAM);

    if (SET_AVAILABLE_FOR_USER.equals(action)) {
        String selectedImei = request.getParameter(SELECTED_IMEI_PARAM);
        String[] selectedSurveyIds = request.getParameterValues(SELECTED_SURVEY_ID_PARAM);
        if (selectedImei != null && selectedSurveyIds != null) {
            log.info("IMEI: " + selectedImei);
            for (int i = 0; i < selectedSurveyIds.length; i++) {
                log.info("Survey id:" + selectedSurveyIds[i]);
            }
        }
        boolean result = m_openRosaBD.makeSurveysAvailableForImei(selectedImei, selectedSurveyIds);
        request.setAttribute(RESULT_ATTR, result);
        dispatchSurveysForUserSelectionPage(request, response);
    } else if (EXPORT_RESULTS_FOR_USER.equals(action)) {
        ServletOutputStream outputStream = null;
        FileInputStream fileInputStream = null;
        DataInputStream dataInputStream = null;
        File file = null;
        try {
            String zipFilename = m_openRosaBD.exportZippedResultsForUser("223344556677");
            file = new File(zipFilename);
            int length = 0;
            outputStream = response.getOutputStream();
            ServletContext context = getServletConfig().getServletContext();
            String mimetype = context.getMimeType("application/octet-stream");

            response.setContentType(mimetype);
            response.setContentLength((int) file.length());
            response.setHeader("Content-Disposition", "attachment; filename=\"" + zipFilename + "\"");

            byte[] bbuf = new byte[1024];
            fileInputStream = new FileInputStream(file);
            dataInputStream = new DataInputStream(fileInputStream);

            while ((dataInputStream != null) && ((length = dataInputStream.read(bbuf)) != -1)) {
                outputStream.write(bbuf, 0, length);
            }
            outputStream.flush();
        } catch (FileNotFoundException ex) {
            ex.printStackTrace();
        } finally {
            try {
                if (fileInputStream != null)
                    fileInputStream.close();
                if (dataInputStream != null)
                    dataInputStream.close();
            } catch (IOException ex) {
                ex.printStackTrace();
            }
            file.delete();
        }
    }
}

From source file:com.bahmanm.karun.PackageCollection.java

/**
 * Reads package information from a package directory.
 *
 * @param pkgDir Package directory
 * @return Package
 */
private Package readPackage(File pkgDir) throws FileNotFoundException, IOException {
    File f = new File(pkgDir.getAbsolutePath() + "/desc");
    FileInputStream fis = new FileInputStream(f);
    DataInputStream dis = new DataInputStream(fis);
    BufferedReader br = new BufferedReader(new InputStreamReader(dis));
    String line = null;
    Package pkg = new Package();
    try {
        boolean name = false;
        boolean desc = false;
        boolean version = false;
        while ((line = br.readLine()) != null) {
            line = StringUtils.normalizeSpace(line);
            if (line.equals("%NAME%")) {
                name = name ? false : true;
            } else if (line.equals("%VERSION%")) {
                version = version ? false : true;
            } else if (line.equals("%DESC%")) {
                desc = desc ? false : true;
            } else if (name) {
                pkg.setName(line);
                name = false;
            } else if (version) {
                pkg.setRepoVersion(line);
                version = false;
            } else if (desc) {
                pkg.setDescription(line);
                desc = false;
            }
        }
    } catch (IOException ex) {
        Logger.getLogger(PackageCollection.class.getName()).log(Level.SEVERE, null, ex);
    } finally {
        try {
            br.close();
            dis.close();
            fis.close();
        } catch (IOException ioex) {
            throw new IOException("Error closing stream or reader: " + ioex.getMessage());
        }
    }
    return pkg;
}

From source file:com.datatorrent.contrib.hdht.HDHTWalManager.java

private void copyWalPart(WalPosition startPosition, WalPosition endPosition, long oldWalKey) {
    try {
        if (bfs.exists(oldWalKey, WAL_FILE_PREFIX + endPosition.fileId)) {
            DataInputStream in = bfs.getInputStream(oldWalKey, WAL_FILE_PREFIX + endPosition.fileId);
            int length = (int) (startPosition.fileId == endPosition.fileId
                    ? endPosition.offset - startPosition.offset
                    : endPosition.offset);
            int offset = (int) (startPosition.fileId == endPosition.fileId ? startPosition.offset : 0);
            logger.info("length = {} offset = {} start offset = {} end offset = {} File = {}", length, offset,
                    startPosition, endPosition);
            if (copyBuffer == null) {
                copyBuffer = new byte[BUFFER_SIZE];
            }
            IOUtils.skip(in, offset);
            while (length > 0) {
                int readBytes = IOUtils.read(in, copyBuffer, 0, length < BUFFER_SIZE ? length : BUFFER_SIZE);
                append(copyBuffer, readBytes);
                length -= readBytes;
            }
            in.close();

            flushWal();
            if (writer != null) {
                walSize = writer.getSize();
            }
            logger.debug("wal size so far = {}", walSize);
        }
    } catch (IOException e) {
        throw Throwables.propagate(e);
    }
}

From source file:edu.jhuapl.tinkerpop.AccumuloGraphConfiguration.java

private PasswordToken deserailize(byte[] tokenBytes) {
    PasswordToken type = null;
    try {
        type = PasswordToken.class.newInstance();
    } catch (Exception e) {
        throw new IllegalArgumentException("Cannot instantiate " + PasswordToken.class.getName(), e);
    }
    ByteArrayInputStream bais = new ByteArrayInputStream(tokenBytes);
    DataInputStream in = new DataInputStream(bais);
    try {
        type.readFields(in);
    } catch (IOException e) {
        throw new IllegalArgumentException(
                "Cannot deserialize provided byte array as class " + PasswordToken.class.getName(), e);
    }
    try {
        in.close();
    } catch (IOException e) {
        throw new IllegalStateException("Shouldn't happen", e);
    }
    return type;
}

From source file:com.hdfs.concat.crush.CrushTest.java

@Test
public void bucketing() throws Exception {
    File in = tmp.newFolder("in");

    Counters expectedCounters = new Counters();
    List<String> expectedBucketFiles = new ArrayList<String>();

    /*
     * Create a hierarchy of directories. Directories are distinguished by a trailing slash in these comments.
     *
     *   1/
     *         1.1/
     *               file1 10 bytes
     *               file2 20 bytes
     *               file3 30 bytes
     *               file4 41 bytes
     *               file5 15 bytes
     *               file6 30 bytes
     *               file7   20 bytes
     *         1.2/
     *               file1 20 bytes
     *               file2 10 bytes
     *         1.3/
     *   2/
     *         file1 70 bytes
     *         file2 30 bytes
     *         file3 25 bytes
     *         file4 30 bytes
     *         file5 35 bytes
     *         2.1/
     *               file1 10 bytes
     *         2.2/
     *               file1 25 bytes
     *               file2 15 bytes
     *               file3 35 bytes
     *         2.3/
     *               file1 41 bytes
     *               file2 10 bytes
     *         2.4/
     *               2.4.1/
     *                     file1 100 bytes
     *                     file2   30 bytes
     *               2.4.2/
     *                     file1 20 bytes
     *                     file2 20 bytes
     *                     file3 10 bytes
     */

    /*
     * in contains 2 dirs and no files so it is skipped.
     *
     *    in/
     *          1/
     *          2/
     */
    expectedCounters.incrCounter(MapperCounter.DIRS_FOUND, 1);
    expectedCounters.incrCounter(MapperCounter.DIRS_SKIPPED, 1);

    tmp.newFolder("in/1");
    File dir2 = tmp.newFolder("in/2");

    /*
     * in/1 contains three dirs and no files so it is skipped.
     *
     *    in/
     *          1/
     *                1.1/
     *                1.2/
     *                1.3/
     */
    expectedCounters.incrCounter(MapperCounter.DIRS_FOUND, 1);
    expectedCounters.incrCounter(MapperCounter.DIRS_SKIPPED, 1);

    File dir1_1 = tmp.newFolder("in/1/1.1");
    File dir1_2 = tmp.newFolder("in/1/1.2");
    tmp.newFolder("in/1/1.3");

    /*
     * in/2 contains five files and four dirs.
     *
     *    in/
     *          2/
     *               file1 70 bytes
     *               file2 30 bytes
     *               file3 25 bytes
     *               file4 30 bytes
     *               file5 35 bytes
     *                2.1/
     *                2.2/
     *                2.3/
     *                2.4/
     *
     *    0             1             2
     *    file5 35      file2 30      file4 30
     *                  file3 25
     *
     * Buckets 0 and 2 have a single file each so they are ignored.
     */
    expectedCounters.incrCounter(MapperCounter.DIRS_FOUND, 1);
    expectedCounters.incrCounter(MapperCounter.DIRS_ELIGIBLE, 1);

    expectedCounters.incrCounter(MapperCounter.FILES_FOUND, 5);
    expectedCounters.incrCounter(MapperCounter.FILES_ELIGIBLE, 2);
    expectedCounters.incrCounter(MapperCounter.FILES_SKIPPED, 3);

    File dir2_1 = tmp.newFolder("in/2/2.1");
    File dir2_2 = tmp.newFolder("in/2/2.2");
    File dir2_3 = tmp.newFolder("in/2/2.3");
    tmp.newFolder("in/2/2.4");

    createFile(dir2, "file1", 70);
    createFile(dir2, "file2", 30);
    createFile(dir2, "file3", 25);
    createFile(dir2, "file4", 30);
    createFile(dir2, "file5", 35);

    expectedBucketFiles
            .add(format("%s\t%s", dir2.getAbsolutePath() + "-1", new File(dir2, "file2").getAbsolutePath()));
    expectedBucketFiles
            .add(format("%s\t%s", dir2.getAbsolutePath() + "-1", new File(dir2, "file3").getAbsolutePath()));

    /*
     * in/1/1.1 contains seven files and no dirs.
     *
     *    in/
     *          1/
     *                1.1/
     *                     file1 10 bytes
     *                     file2 20 bytes
     *                     file3 30 bytes
     *                     file4 41 bytes
     *                     file5 15 bytes
     *                     file6 30 bytes
     *                     file7   20 bytes
     *
     *    0             1             2
     *    file3 30      file6 30      file2 20
     *    file5 15      file1 10      file7 20
     *
     * file4 is > 50 * 0.8 so it is ignored.
     */
    expectedCounters.incrCounter(MapperCounter.DIRS_FOUND, 1);
    expectedCounters.incrCounter(MapperCounter.DIRS_ELIGIBLE, 1);

    expectedCounters.incrCounter(MapperCounter.FILES_FOUND, 7);
    expectedCounters.incrCounter(MapperCounter.FILES_ELIGIBLE, 6);
    expectedCounters.incrCounter(MapperCounter.FILES_SKIPPED, 1);

    createFile(dir1_1, "file1", 10);
    createFile(dir1_1, "file2", 20);
    createFile(dir1_1, "file3", 30);
    createFile(dir1_1, "file4", 41);
    createFile(dir1_1, "file5", 15);
    createFile(dir1_1, "file6", 30);
    createFile(dir1_1, "file7", 20);

    expectedBucketFiles.add(
            format("%s\t%s", dir1_1.getAbsolutePath() + "-0", new File(dir1_1, "file3").getAbsolutePath()));
    expectedBucketFiles.add(
            format("%s\t%s", dir1_1.getAbsolutePath() + "-0", new File(dir1_1, "file5").getAbsolutePath()));
    expectedBucketFiles.add(
            format("%s\t%s", dir1_1.getAbsolutePath() + "-1", new File(dir1_1, "file6").getAbsolutePath()));
    expectedBucketFiles.add(
            format("%s\t%s", dir1_1.getAbsolutePath() + "-1", new File(dir1_1, "file1").getAbsolutePath()));
    expectedBucketFiles.add(
            format("%s\t%s", dir1_1.getAbsolutePath() + "-2", new File(dir1_1, "file2").getAbsolutePath()));
    expectedBucketFiles.add(
            format("%s\t%s", dir1_1.getAbsolutePath() + "-2", new File(dir1_1, "file7").getAbsolutePath()));

    /*
     * in/1/1.2 contains two files.
     *
     *    in/
     *          1/
     *                1.2/
     *                     file1 20 bytes
     *                     file2 10 bytes
     */
    expectedCounters.incrCounter(MapperCounter.DIRS_FOUND, 1);
    expectedCounters.incrCounter(MapperCounter.DIRS_ELIGIBLE, 1);

    expectedCounters.incrCounter(MapperCounter.FILES_FOUND, 2);
    expectedCounters.incrCounter(MapperCounter.FILES_ELIGIBLE, 2);

    createFile(dir1_2, "file1", 20);
    createFile(dir1_2, "file2", 10);

    expectedBucketFiles.add(
            format("%s\t%s", dir1_2.getAbsolutePath() + "-0", new File(dir1_2, "file1").getAbsolutePath()));
    expectedBucketFiles.add(
            format("%s\t%s", dir1_2.getAbsolutePath() + "-0", new File(dir1_2, "file2").getAbsolutePath()));

    /*
     * in/1/1.3 is empty.
     *
     *    in/
     *          1/
     *                1.3/
     */
    expectedCounters.incrCounter(MapperCounter.DIRS_FOUND, 1);
    expectedCounters.incrCounter(MapperCounter.DIRS_SKIPPED, 1);

    tmp.newFolder("in/1/1.3");

    /*
     * in/2/2.1 contains one file.
     *
     *    in/
     *          2/
     *                2.1/
     *                     file1 10 bytes
     *
     * Single file dirs are ignored.
     */
    expectedCounters.incrCounter(MapperCounter.DIRS_FOUND, 1);
    expectedCounters.incrCounter(MapperCounter.DIRS_SKIPPED, 1);

    expectedCounters.incrCounter(MapperCounter.FILES_FOUND, 1);
    expectedCounters.incrCounter(MapperCounter.FILES_SKIPPED, 1);

    createFile(dir2_1, "file1", 10);

    /*
     * in/2/2.2 contains three files.
     *
     *    in/
     *          2/
     *                2.2/
     *                     file1 25 bytes
     *                     file2 15 bytes
     *                     file3 35 bytes
     *
     *    0             1
     *    file3 35      file1 25
     *                  file2 15
     *
     * Bucket 0 with a single file is ignored.
     */
    expectedCounters.incrCounter(MapperCounter.DIRS_FOUND, 1);
    expectedCounters.incrCounter(MapperCounter.DIRS_ELIGIBLE, 1);

    expectedCounters.incrCounter(MapperCounter.FILES_FOUND, 3);
    expectedCounters.incrCounter(MapperCounter.FILES_ELIGIBLE, 2);
    expectedCounters.incrCounter(MapperCounter.FILES_SKIPPED, 1);

    createFile(dir2_2, "file1", 25);
    createFile(dir2_2, "file2", 15);
    createFile(dir2_2, "file3", 35);

    expectedBucketFiles.add(
            format("%s\t%s", dir2_2.getAbsolutePath() + "-1", new File(dir2_2, "file1").getAbsolutePath()));
    expectedBucketFiles.add(
            format("%s\t%s", dir2_2.getAbsolutePath() + "-1", new File(dir2_2, "file2").getAbsolutePath()));

    /*
     * in/2/2.3 contains 2 files.
     *
     *    in/
     *          2/
     *                2.3/
     *                     file1 41 bytes
     *                     file2 10 bytes
     *
     * file1 is too big, leaving file2 as a single file, which is also ignored.
     */
    expectedCounters.incrCounter(MapperCounter.DIRS_FOUND, 1);
    expectedCounters.incrCounter(MapperCounter.DIRS_SKIPPED, 1);

    expectedCounters.incrCounter(MapperCounter.FILES_FOUND, 2);
    expectedCounters.incrCounter(MapperCounter.FILES_SKIPPED, 2);

    createFile(dir2_3, "file1", 41);
    createFile(dir2_3, "file2", 10);

    /*
     * in/2/2.4 contains two sub directories and no files.
     *
     *    in/
     *          2/
     *               2.4/
     *                     2.4.1/
     *                     2.4.2/
     */
    expectedCounters.incrCounter(MapperCounter.DIRS_FOUND, 1);
    expectedCounters.incrCounter(MapperCounter.DIRS_SKIPPED, 1);

    tmp.newFolder("in/2/2.4");

    File dir2_4_1 = tmp.newFolder("in/2/2.4/2.4.1");
    File dir2_4_2 = tmp.newFolder("in/2/2.4/2.4.2");

    /*
     *    in/
     *          2/
     *               2.4/
     *                     2.4.1/
     *                           file1 100 bytes
     *                           file2   30 bytes
     */
    expectedCounters.incrCounter(MapperCounter.DIRS_FOUND, 1);
    expectedCounters.incrCounter(MapperCounter.DIRS_SKIPPED, 1);

    expectedCounters.incrCounter(MapperCounter.FILES_FOUND, 2);
    expectedCounters.incrCounter(MapperCounter.FILES_SKIPPED, 2);

    createFile(dir2_4_1, "file1", 100);
    createFile(dir2_4_1, "file2", 30);

    /*
     *    in/
     *          2/
     *               2.4/
     *                     2.4.2/
     *                           file1 20 bytes
     *                           file2 20 bytes
     *                           file3 10 bytes
     *   0
     *   file1 20
     *   file2 20
     *   file3 10
     */
    expectedCounters.incrCounter(MapperCounter.DIRS_FOUND, 1);
    expectedCounters.incrCounter(MapperCounter.DIRS_ELIGIBLE, 1);

    expectedCounters.incrCounter(MapperCounter.FILES_FOUND, 3);
    expectedCounters.incrCounter(MapperCounter.FILES_ELIGIBLE, 3);

    createFile(dir2_4_2, "file1", 20);
    createFile(dir2_4_2, "file2", 20);
    createFile(dir2_4_2, "file3", 10);

    expectedBucketFiles.add(format("%s\t%s", dir2_4_2.getAbsolutePath() + "-0",
            new File(dir2_4_2, "file1").getAbsolutePath()));
    expectedBucketFiles.add(format("%s\t%s", dir2_4_2.getAbsolutePath() + "-0",
            new File(dir2_4_2, "file2").getAbsolutePath()));
    expectedBucketFiles.add(format("%s\t%s", dir2_4_2.getAbsolutePath() + "-0",
            new File(dir2_4_2, "file3").getAbsolutePath()));

    Crush crush = new Crush();

    crush.setConf(job);
    crush.setFileSystem(fileSystem);

    /*
     * Call these in the same order that run() does.
     */
    crush.createJobConfAndParseArgs("--compress=none", "--max-file-blocks=1", in.getAbsolutePath(),
            new File(tmp.getRoot(), "out").getAbsolutePath(), "20101124171730");
    crush.writeDirs();

    /*
     * Verify bucket contents.
     */

    List<String> actualBucketFiles = new ArrayList<String>();

    Text key = new Text();
    Text value = new Text();

    Reader reader = new Reader(FileSystem.get(job), crush.getBucketFiles(), job);

    while (reader.next(key, value)) {
        actualBucketFiles.add(format("%s\t%s", key, value));
    }

    reader.close();

    Collections.sort(expectedBucketFiles);
    Collections.sort(actualBucketFiles);

    assertThat(actualBucketFiles, equalTo(expectedBucketFiles));

    /*
     * Verify the partition map.
     */
    Reader partitionMapReader = new Reader(FileSystem.get(job), crush.getPartitionMap(), job);

    IntWritable partNum = new IntWritable();

    Map<String, Integer> actualPartitions = new HashMap<String, Integer>();

    while (partitionMapReader.next(key, partNum)) {
        actualPartitions.put(key.toString(), partNum.get());
    }

    partitionMapReader.close();

    /*
     * These crush files need to be allocated into 5 partitions:
     *
     * in/2-1                  55 bytes
     * in/1/1.1-0            45 bytes
     * in/1/1.1-2            40 bytes
     * in/1/1.1-1            40 bytes
     * in/1/1.2-0            30 bytes
     * in/2/2.2-1            40 bytes
     * in/2/2.4/2.4.2-0   50 bytes
     *
     *    0           1                     2               3               4
     *    in/2-1 55   in/2/2.4/2.4.2-0 50   in/1/1.1-0 45   in/1/1.1-2 40   in/1/1.1-1 40
     *                                                      in/2/2.2-1 40   in/1/1.2-0 30
     */
    Map<String, Integer> expectedPartitions = new HashMap<String, Integer>();

    //TODO: this may not be deterministic due to jvm/hashmap/filesystem
    expectedPartitions.put(dir2.getAbsolutePath() + "-1", 0);
    expectedPartitions.put(dir2_4_2.getAbsolutePath() + "-0", 1);
    expectedPartitions.put(dir1_1.getAbsolutePath() + "-0", 2);
    expectedPartitions.put(dir1_1.getAbsolutePath() + "-2", 4);
    expectedPartitions.put(dir2_2.getAbsolutePath() + "-1", 3);
    expectedPartitions.put(dir1_1.getAbsolutePath() + "-1", 3);
    expectedPartitions.put(dir1_2.getAbsolutePath() + "-0", 4);

    assertThat(actualPartitions, equalTo(expectedPartitions));

    /*
     * Verify counters.
     */
    Counters actualCounters = new Counters();

    DataInputStream countersStream = FileSystem.get(job).open(crush.getCounters());

    actualCounters.readFields(countersStream);

    countersStream.close();

    assertThat(actualCounters, equalTo(expectedCounters));
}

From source file:bobs.is.compress.sevenzip.SevenZFile.java

private Archive readHeaders(final byte[] password) throws IOException {
    final byte[] signature = new byte[6];
    file.readFully(signature);
    if (!Arrays.equals(signature, sevenZSignature)) {
        throw new IOException("Bad 7z signature");
    }
    // 7zFormat.txt has it wrong - it's first major then minor
    final byte archiveVersionMajor = file.readByte();
    final byte archiveVersionMinor = file.readByte();
    if (archiveVersionMajor != 0) {
        throw new IOException(
                String.format("Unsupported 7z version (%d,%d)", archiveVersionMajor, archiveVersionMinor));
    }

    final long startHeaderCrc = 0xffffFFFFL & Integer.reverseBytes(file.readInt());
    final StartHeader startHeader = readStartHeader(startHeaderCrc);

    final int nextHeaderSizeInt = (int) startHeader.nextHeaderSize;
    if (nextHeaderSizeInt != startHeader.nextHeaderSize) {
        throw new IOException("cannot handle nextHeaderSize " + startHeader.nextHeaderSize);
    }
    file.seek(SIGNATURE_HEADER_SIZE + startHeader.nextHeaderOffset);
    final byte[] nextHeader = new byte[nextHeaderSizeInt];
    file.readFully(nextHeader);
    final CRC32 crc = new CRC32();
    crc.update(nextHeader);
    if (startHeader.nextHeaderCrc != crc.getValue()) {
        throw new IOException("NextHeader CRC mismatch");
    }

    final ByteArrayInputStream byteStream = new ByteArrayInputStream(nextHeader);
    DataInputStream nextHeaderInputStream = new DataInputStream(byteStream);
    Archive archive = new Archive();
    int nid = nextHeaderInputStream.readUnsignedByte();
    if (nid == NID.kEncodedHeader) {
        nextHeaderInputStream = readEncodedHeader(nextHeaderInputStream, archive, password);
        // Archive gets rebuilt with the new header
        archive = new Archive();
        nid = nextHeaderInputStream.readUnsignedByte();
    }
    if (nid == NID.kHeader) {
        readHeader(nextHeaderInputStream, archive);
        nextHeaderInputStream.close();
    } else {
        throw new IOException("Broken or unsupported archive: no Header");
    }
    return archive;
}