Example usage for com.google.common.io LittleEndianDataInputStream LittleEndianDataInputStream

List of usage examples for com.google.common.io LittleEndianDataInputStream LittleEndianDataInputStream

Introduction

On this page you can find example usages of the com.google.common.io LittleEndianDataInputStream(InputStream) constructor.

Prototype

public LittleEndianDataInputStream(InputStream in) 

Source Link

Document

Creates a LittleEndianDataInputStream that wraps the given stream.

Usage

From source file:org.linguafranca.pwdb.kdbx.stream_3_1.KdbxSerializer.java

/**
 * Populate a KdbxHeader from the input stream supplied.
 *
 * <p>The header is read field-by-field while a message digest accumulates
 * every byte consumed, so the header hash can be recorded at the end.
 * Neither wrapper stream is closed: closing them would close the caller's
 * underlying stream and lose the position of the payload that follows.
 *
 * @param kdbxHeader a header to be populated
 * @param inputStream an input stream positioned at the start of a KDBX file
 * @return the populated KdbxHeader (the same instance that was passed in)
 * @throws IOException on error
 * @throws IllegalStateException if the magic number, file version, or a
 *         header field type is not recognised
 */
public static KdbxHeader readKdbxHeader(KdbxHeader kdbxHeader, InputStream inputStream) throws IOException {

    MessageDigest digest = Encryption.getMessageDigestInstance();
    // we do not close this stream, otherwise we lose our place in the underlying stream
    DigestInputStream digestInputStream = new DigestInputStream(inputStream, digest);
    // we do not close this stream, otherwise we lose our place in the underlying stream
    LittleEndianDataInputStream ledis = new LittleEndianDataInputStream(digestInputStream);

    if (!verifyMagicNumber(ledis)) {
        throw new IllegalStateException("Magic number did not match");
    }

    if (!verifyFileVersion(ledis)) {
        throw new IllegalStateException("File version did not match");
    }

    // Header fields: a one-byte type tag followed by a value; the list is
    // terminated by an END tag.
    byte headerType;
    while ((headerType = ledis.readByte()) != HeaderType.END) {
        switch (headerType) {

        case HeaderType.COMMENT:
            // comment value is consumed (keeps the digest correct) but discarded
            getByteArray(ledis);
            break;

        case HeaderType.CIPHER_ID:
            kdbxHeader.setCipherUuid(getByteArray(ledis));
            break;

        case HeaderType.COMPRESSION_FLAGS:
            kdbxHeader.setCompressionFlags(getInt(ledis));
            break;

        case HeaderType.MASTER_SEED:
            kdbxHeader.setMasterSeed(getByteArray(ledis));
            break;

        case HeaderType.TRANSFORM_SEED:
            kdbxHeader.setTransformSeed(getByteArray(ledis));
            break;

        case HeaderType.TRANSFORM_ROUNDS:
            kdbxHeader.setTransformRounds(getLong(ledis));
            break;

        case HeaderType.ENCRYPTION_IV:
            kdbxHeader.setEncryptionIv(getByteArray(ledis));
            break;

        case HeaderType.PROTECTED_STREAM_KEY:
            kdbxHeader.setProtectedStreamKey(getByteArray(ledis));
            break;

        case HeaderType.STREAM_START_BYTES:
            kdbxHeader.setStreamStartBytes(getByteArray(ledis));
            break;

        case HeaderType.INNER_RANDOM_STREAM_ID:
            kdbxHeader.setInnerRandomStreamId(getInt(ledis));
            break;

        default:
            throw new IllegalStateException("Unknown File Header");
        }
    }

    // consume length etc. following END flag
    getByteArray(ledis);

    // everything read so far (including the END field) is part of the header hash
    kdbxHeader.setHeaderHash(digest.digest());
    return kdbxHeader;
}

From source file:net.sf.mzmine.modules.rawdatamethods.rawdataimport.fileformats.XcaliburRawFileReadTask.java

/**
 * This method reads the dump of the RAW data file produced by the RAWdump.exe
 * utility (see RAWdump.cpp source for details) and converts its scans into
 * entries of {@code newMZmineFile}.
 *
 * @param dumpStream the dump output; intentionally NOT closed here, because
 *            this loop keeps reading text lines from it after each binary
 *            data-point section
 * @throws IOException on read failure or when the dump reports an error line
 */
private void readRAWDump(InputStream dumpStream) throws IOException {

    /*
     * A typical filter line for MS/MS scan looks like this:
     *
     * ITMS - c ESI d Full ms3 587.03@cid35.00 323.00@cid35.00
     *
     * Compiled once here instead of once per matching line.
     */
    final Pattern filterPattern = Pattern.compile("ms(\\d).* (\\d+\\.\\d+)@");

    String line;
    while ((line = TextUtils.readLineFromStream(dumpStream)) != null) {

        if (isCanceled()) {
            return;
        }

        if (line.startsWith("ERROR: ")) {
            throw (new IOException(line.substring("ERROR: ".length())));
        }

        if (line.startsWith("NUMBER OF SCANS: ")) {
            totalScans = Integer.parseInt(line.substring("NUMBER OF SCANS: ".length()));
        }

        if (line.startsWith("SCAN NUMBER: ")) {
            scanNumber = Integer.parseInt(line.substring("SCAN NUMBER: ".length()));
        }

        if (line.startsWith("SCAN FILTER: ")) {
            Matcher m = filterPattern.matcher(line);
            if (m.find()) {
                msLevel = Integer.parseInt(m.group(1));

                // Initially we obtain precursor m/z from this filter line,
                // even though the precision is not good. Later more precise
                // precursor m/z may be reported using PRECURSOR: line, but
                // sometimes it is missing (equal to 0)
                precursorMZ = Double.parseDouble(m.group(2));
            } else {
                msLevel = 1;
            }
        }

        if (line.startsWith("RETENTION TIME: ")) {
            // Retention time in the RAW file is reported in minutes.
            retentionTime = Double.parseDouble(line.substring("RETENTION TIME: ".length()));
        }

        if (line.startsWith("PRECURSOR: ")) {
            String[] tokens = line.split(" ");
            double precursorMzValue = Double.parseDouble(tokens[1]);
            int precursorChargeValue = Integer.parseInt(tokens[2]);
            // A zero precursor m/z means "not reported"; keep the filter-line value.
            if (precursorMzValue > 0) {
                precursorMZ = precursorMzValue;
                precursorCharge = precursorChargeValue;
            }
        }

        if (line.startsWith("DATA POINTS: ")) {
            int numOfDataPoints = Integer.parseInt(line.substring("DATA POINTS: ".length()));

            DataPoint[] completeDataPoints = new DataPoint[numOfDataPoints];

            // Because Intel CPU is using little endian natively, we
            // need to use LittleEndianDataInputStream instead of normal
            // Java DataInputStream, which is big-endian.
            //
            // BUG FIX: the wrapper must NOT be closed. Closing this
            // FilterInputStream closes the wrapped dumpStream as well, which
            // would make every subsequent readLineFromStream() call fail for
            // all remaining scans.
            LittleEndianDataInputStream dis = new LittleEndianDataInputStream(dumpStream);
            for (int i = 0; i < numOfDataPoints; i++) {
                double mz = dis.readDouble();
                double intensity = dis.readDouble();
                completeDataPoints[i] = new SimpleDataPoint(mz, intensity);
            }

            boolean centroided = ScanUtils.isCentroided(completeDataPoints);

            DataPoint[] optimizedDataPoints = ScanUtils.removeZeroDataPoints(completeDataPoints, centroided);

            /*
             * If this scan is a full scan (ms level = 1), it means that the
             * previous scans stored in the stack, are complete and ready to
             * be written to the raw data file.
             */
            if (msLevel == 1) {
                while (!parentStack.isEmpty()) {
                    SimpleScan currentScan = parentStack.removeFirst();
                    newMZmineFile.addScan(currentScan);
                }
            }

            // Setting the current parentScan
            int parentScan = -1;
            if (msLevel > 1) {
                parentScan = parentTreeValue[msLevel - 1];

                if (!parentStack.isEmpty()) {
                    for (SimpleScan s : parentStack) {
                        if (s.getScanNumber() == parentScan) {
                            s.addFragmentScan(scanNumber);
                        }
                    }
                }
            }

            // Setting the parent scan number for this level of fragments
            parentTreeValue[msLevel] = scanNumber;

            SimpleScan newScan = new SimpleScan(null, scanNumber, msLevel, retentionTime, parentScan,
                    precursorMZ, precursorCharge, null, optimizedDataPoints, centroided);

            parentStack.add(newScan);
            parsedScans++;

            // Clean the variables for next scan
            scanNumber = 0;
            msLevel = 0;
            retentionTime = 0;
            precursorMZ = 0;
            precursorCharge = 0;

        }

    }

    // Add remaining scans in the parentStack
    while (!parentStack.isEmpty()) {
        SimpleScan currentScan = parentStack.removeFirst();
        newMZmineFile.addScan(currentScan);
    }

}

From source file:com.koda.persistence.rawfs.RawFSStore.java

/**
 * Loads the most recent persisted cache image from disk into a new
 * {@link OffHeapCache}, reporting progress through {@code pl}.
 *
 * <p>Layout of the file as read here: a 4-byte config size, a serialized
 * {@code ExtCacheConfiguration}, then repeated [4-byte blob size, blob]
 * records until EOF. Sizes are read via the DataInput wrapper in the
 * platform's native byte order; blob payloads are read directly from the
 * FileChannel, which shares its position with the wrapping stream.
 *
 * @param pl progress listener; may be null
 * @return the populated cache, or null if no saved store file was found
 * @throws IOException if loading fails or the store is not IDLE
 */
@Override
public OffHeapCache load(ProgressListener pl) throws IOException {

    // Check that the store in an IDLE state
    if (!state.compareAndSet(State.IDLE, State.LOAD)) {
        throw new IOException("Can't load. Incompatible state.");
    }

    FileInputStream fis = null;
    DataInput dis = null;
    // Pick a DataInput implementation matching the machine's native byte
    // order, since the file was written in native order.
    ByteOrder nativeByteOrder = ByteOrder.nativeOrder();
    boolean littleEndian = nativeByteOrder == ByteOrder.LITTLE_ENDIAN;
    Codec codec = null;
    try {
        // Root directory for
        File f = getLatestStoreFile();
        // Nothing was found
        if (f == null) {
            LOG.warn("no saved caches found in :" + diskStoreRoots + File.separator + " for cache [" + storeName
                    + "]");
            return null;
        }

        fis = new FileInputStream(f);
        if (littleEndian) {
            // Use Guava
            dis = new LittleEndianDataInputStream(fis);
        } else {
            // Use standard JDK java.io
            dis = new DataInputStream(fis);
        }
        // The channel shares the stream's file position, so int reads via
        // 'dis' advance the channel and vice versa.
        fileChannel = fis.getChannel();
        long fileLength = fileChannel.size();
        int cfgSize = dis.readInt();
        buffer.clear();
        buffer.limit(cfgSize);
        int total = 0;
        // Read all bytes from channel till 'cfgSize' limit
        // NOTE(review): if read() hits EOF it returns -1 and this loop would
        // spin forever on a truncated file — confirm upstream guarantees.
        while ((total += fileChannel.read(buffer)) < cfgSize)
            ;

        buffer.flip();
        ExtCacheConfiguration cfg = ExtCacheConfiguration.read(buffer, null);

        // Create cache instance first to make sure
        // that we set System property COMPRESSION_THRESHOLD
        cache = new OffHeapCache(cfg);

        // Compression codec used
        codec = cfg.getCacheConfig().getDiskStoreConfiguration().getDbCompressionType().getCodec();

        // 4 bytes of the size prefix plus the config blob itself
        long totalRead = 4 + cfgSize;
        while (totalRead < fileLength) {
            int blobSize = dis.readInt();
            //LOG.info("\n\nBlob size="+blobSize+"\n\n");
            buffer.clear();
            buffer.limit(blobSize);
            total = 0;
            // Read all bytes from channel till 'cfgSize' limit
            while ((total += fileChannel.read(buffer)) < blobSize)
                ;
            //LOG.info("Total read ="+total);   
            buffer.flip();
            //LOG.info("[LOAD] buffer="+buffer.limit());
            if (codec != null) {
                // Decompress data
                tempBuffer.clear();
                codec.decompress(buffer, tempBuffer);
                //tempBuffer.flip();
                //LOG.info("[LOAD] decompressed pos="+tempBuffer.position()+" limit="+tempBuffer.limit());
                loadDataFromTo(tempBuffer, cache);
            } else {
                loadDataFromTo(buffer, cache);
            }
            totalRead += blobSize + 4;
            // check cancelled
            if (isCanceled()) {
                // Notify listener
                if (pl != null) {
                    pl.canceled();
                }
                throw new RuntimeException("canceled");
            } else {
                if (pl != null) {
                    pl.progress(totalRead, fileLength);
                }
            }
        }

        if (pl != null) {
            pl.finished();
        }

        return cache;
    } catch (Exception e) {

        if (pl != null) {
            pl.error(e, true);
        }
        throw new IOException(e);

    } finally {
        if (isCanceled()) {
            cancelRequested = false;
        }
        // Closing the channel releases the file; presumably this also closes
        // 'fis', which shares the channel's file descriptor — verify.
        if (fileChannel != null && fileChannel.isOpen()) {
            fileChannel.close();
        }
        //TODO
        // Return to IDLE regardless of success or failure.
        state.compareAndSet(State.LOAD, State.IDLE);
    }

}

From source file:org.bimserver.client.ClientIfcModel.java

/**
 * Decodes a little-endian BGS (BIMserver geometry stream, format version 8)
 * from {@code inputStream}, attaching the decoded GeometryInfo/GeometryData
 * objects to their IfcProducts in this model.
 *
 * <p>The stream is a sequence of one-byte-typed records: 0 = header,
 * 5 = geometry info, 1 = geometry data, 3 = parts (unsupported),
 * 6 = end of stream. Reads must occur in exactly this order; the stream
 * is closed when parsing completes or fails.
 *
 * @param inputStream the raw geometry stream (consumed and closed here)
 * @param geometryInfoOidToOid maps geometry-info oids to IfcProduct oids
 * @throws IOException on read failure
 * @throws GeometryException on protocol/version mismatch or unknown record type
 * @throws IfcModelInterfaceException on model access failure
 */
private void processGeometryInputStream(InputStream inputStream, Map<Long, Long> geometryInfoOidToOid)
        throws IOException, GeometryException, IfcModelInterfaceException {
    try (LittleEndianDataInputStream dataInputStream = new LittleEndianDataInputStream(inputStream)) {
        boolean done = false;
        while (!done) {
            byte type = dataInputStream.readByte();
            // NOTE(review): looks like leftover debug output — consider removing
            // or routing through a logger.
            System.out.println(type);
            if (type == 0) {
                // Header record: protocol marker, version, padding, then 6 doubles
                // (presumably a model bounding box — confirm against writer).
                String protocol = dataInputStream.readUTF();
                if (!protocol.equals("BGS")) {
                    throw new GeometryException("Protocol != BGS (" + protocol + ")");
                }
                byte formatVersion = dataInputStream.readByte();
                if (formatVersion != 8) {
                    throw new GeometryException("Unsupported version " + formatVersion + " / 8");
                }
                // Skip alignment padding; 4 - (7 % 4) is the writer's padding
                // formula (evaluates to 1 byte here).
                int skip = 4 - (7 % 4);
                if (skip != 0 && skip != 4) {
                    dataInputStream.readFully(new byte[skip]);
                }
                for (int i = 0; i < 6; i++) {
                    dataInputStream.readDouble();
                }
            } else if (type == 5) {
                // Geometry-info record.
                dataInputStream.readFully(new byte[7]); // alignment padding
                dataInputStream.readLong(); // roid
                long geometryInfoOid = dataInputStream.readLong();
                GeometryInfo geometryInfo = (GeometryInfo) get(geometryInfoOid);
                if (geometryInfo == null) {
                    geometryInfo = create(GeometryInfo.class);
                }
                add(geometryInfoOid, geometryInfo);

                Long ifcProductOid = geometryInfoOidToOid.get(geometryInfoOid);
                if (ifcProductOid == null) {
                    throw new GeometryException("Missing geometry info id: " + geometryInfoOid);
                }
                IfcProduct ifcProduct = (IfcProduct) get(ifcProductOid);
                ifcProduct.setGeometry(geometryInfo);

                // Axis-aligned bounding box: min corner then max corner.
                org.bimserver.models.geometry.Vector3f minBounds = GeometryFactory.eINSTANCE.createVector3f();
                minBounds.setX(dataInputStream.readDouble());
                minBounds.setY(dataInputStream.readDouble());
                minBounds.setZ(dataInputStream.readDouble());

                org.bimserver.models.geometry.Vector3f maxBounds = GeometryFactory.eINSTANCE.createVector3f();
                maxBounds.setX(dataInputStream.readDouble());
                maxBounds.setY(dataInputStream.readDouble());
                maxBounds.setZ(dataInputStream.readDouble());

                geometryInfo.setMinBounds(minBounds);
                geometryInfo.setMaxBounds(maxBounds);

                // 4x4 transformation matrix stored as 16 raw doubles.
                byte[] transformation = new byte[16 * 8];
                dataInputStream.readFully(transformation);
                geometryInfo.setTransformation(transformation);

                // Link to the (possibly shared, possibly not-yet-read) data record.
                long geometryDataOid = dataInputStream.readLong();
                GeometryData geometryData = (GeometryData) get(geometryDataOid);
                if (geometryData == null) {
                    geometryData = GeometryFactory.eINSTANCE.createGeometryData();
                    add(geometryDataOid, geometryData);
                }
                geometryInfo.setData(geometryData);
            } else if (type == 3) {
                throw new GeometryException("Parts not supported");
            } else if (type == 1) {
                // Geometry-data record: four length-prefixed raw byte arrays
                // (counts are element counts; each element is 4 bytes).
                dataInputStream.readFully(new byte[7]); // alignment padding
                long geometryDataOid = dataInputStream.readLong();

                GeometryData geometryData = (GeometryData) get(geometryDataOid);
                if (geometryData == null) {
                    geometryData = GeometryFactory.eINSTANCE.createGeometryData();
                    add(geometryDataOid, geometryData);
                }

                int nrIndices = dataInputStream.readInt();
                byte[] indices = new byte[nrIndices * 4];
                dataInputStream.readFully(indices);
                geometryData.setIndices(indices);

                int nrVertices = dataInputStream.readInt();
                byte[] vertices = new byte[nrVertices * 4];
                dataInputStream.readFully(vertices);
                geometryData.setVertices(vertices);

                int nrNormals = dataInputStream.readInt();
                byte[] normals = new byte[nrNormals * 4];
                dataInputStream.readFully(normals);
                geometryData.setNormals(normals);

                int nrMaterials = dataInputStream.readInt();
                byte[] materials = new byte[nrMaterials * 4];
                dataInputStream.readFully(materials);
                geometryData.setMaterials(materials);
            } else if (type == 6) {
                // End-of-stream marker.
                done = true;
            } else {
                throw new GeometryException("Unimplemented type: " + type);
            }
        }
    }
}

From source file:com.google.devtools.build.android.AndroidCompiledDataDeserializer.java

/**
 * Reads every entry of the aapt2 compiled-resource zip at {@code inPath} and
 * feeds the deserialized resources to {@code consumers}.
 *
 * <p>Entries ending in {@code .attributes} are parsed as attribute files;
 * all other entries are little-endian aapt2 containers whose fourth header
 * int selects a resource table (0) or a compiled file (1).
 *
 * @param inPath path to the compiled-resource zip
 * @param consumers sinks receiving the deserialized key/value resources
 * @throws DeserializationException on I/O failure or aapt2 format mismatch
 */
@Override
public void read(Path inPath, KeyValueConsumers consumers) {
    Stopwatch timer = Stopwatch.createStarted();
    try (ZipFile zipFile = new ZipFile(inPath.toFile())) {
        Enumeration<? extends ZipEntry> entries = zipFile.entries();

        while (entries.hasMoreElements()) {
            ZipEntry entry = entries.nextElement();
            String zipEntryPath = entry.getName();
            // Entry names encode the resource subdirectory with '_' after the
            // last '/'; rebuild the real path with '/' instead.
            int subdirSeparator = zipEntryPath.indexOf('_', zipEntryPath.lastIndexOf('/'));
            Path filePath = Paths.get(String.format("%s%c%s",
                    zipEntryPath.substring(0, subdirSeparator), '/',
                    zipEntryPath.substring(subdirSeparator + 1)));

            String shortPath = filePath.getParent().getFileName() + "/" + filePath.getFileName();

            if (filteredResources.contains(shortPath) && !Files.exists(filePath)) {
                // Skip files that were filtered out during analysis.
                // TODO(asteinb): Properly filter out these files from android_library symbol files during
                // analysis instead, and remove this list.
                continue;
            }

            try (InputStream entryStream = zipFile.getInputStream(entry)) {
                String[] dirNameAndQualifiers =
                        filePath.getParent().getFileName().toString().split(SdkConstants.RES_QUALIFIER_SEP);
                Factory fqnFactory = Factory.fromDirectoryName(dirNameAndQualifiers);

                if (zipEntryPath.endsWith(".attributes")) {
                    readAttributesFile(entryStream, inPath.getFileSystem(), consumers);
                } else {
                    LittleEndianDataInputStream headerStream =
                            new LittleEndianDataInputStream(entryStream);

                    // aapt2 container header: magic, format version, entry
                    // count, then the type tag we dispatch on.
                    int magicNumber = headerStream.readInt();
                    int formatVersion = headerStream.readInt();
                    int numberOfEntries = headerStream.readInt();
                    int resourceType = headerStream.readInt();

                    switch (resourceType) {
                    case 0: // 0 is a resource table
                        readResourceTable(headerStream, consumers);
                        break;
                    case 1: // 1 is a resource file
                        readCompiledFile(headerStream, consumers, fqnFactory);
                        break;
                    default:
                        throw new DeserializationException("aapt2 version mismatch.",
                                new DeserializationException(String.format(
                                        "Unexpected tag for resourceType %s expected 0 or 1 in %s."
                                                + "\n Last known good values:"
                                                + "\n\tmagicNumber 1414545729 (is %s)"
                                                + "\n\tformatVersion 1 (is %s)"
                                                + "\n\tnumberOfEntries 1 (is %s)",
                                        resourceType, zipEntryPath, magicNumber, formatVersion,
                                        numberOfEntries)));
                    }
                }
            }
        }
    } catch (IOException e) {
        throw new DeserializationException("Error deserializing " + inPath, e);
    } finally {
        logger.fine(
                String.format("Deserialized in compiled merged in %sms", timer.elapsed(TimeUnit.MILLISECONDS)));
    }
}

From source file:com.xyphos.vmtgen.GUI.java

/**
 * Handles selection changes in the file list: derives keywords and the base
 * texture path from the selected file name, then reads the VTF header to
 * populate the flag checkboxes and animation controls.
 *
 * @param evt the Swing list-selection event
 */
private void lstFilesValueChanged(javax.swing.event.ListSelectionEvent evt) {// GEN-FIRST:event_lstFilesValueChanged
    if (!evt.getValueIsAdjusting() && (-1 != lstFiles.getSelectedIndex())) {
        String file = lstFiles.getSelectedValue().toString();

        // set keywords based on file name
        setKeywords(FilenameUtils.getBaseName(file).replace("_", ",").replace("-", ","));

        String path = FilenameUtils
                .separatorsToUnix(FilenameUtils.concat(basePath, FilenameUtils.getBaseName(file)))
                .replaceFirst("/", "");

        setBaseTexture1(path);

        // read the vtf header
        file = FilenameUtils.concat(workPath, file);
        File fileVTF = new File(file);

        // try-with-resources closes the stream; the previous explicit
        // in.close() was redundant and has been removed.
        try (LittleEndianDataInputStream in = new LittleEndianDataInputStream(new FileInputStream(fileVTF))) {

            int sig = in.readInt();
            if (SIGNATURE_VTF != sig) {
                throw new IOException("Not a VTF file");
            }

            // skip ahead to the flags field of the VTF header
            if (0x10 != in.skipBytes(0x10)) {
                throw new IOException("skip failure");
            }

            int flags = in.readInt();
            frameCount = in.readShort();

            chkFlagNoLOD.setSelected(0 != (0x200 & flags));
            chkFlagTranslucent.setSelected(0 != (0x3000 & flags));

            // A texture with more than one frame is animated. Assignment was
            // previously buried inside the if-condition; kept separate here
            // for clarity (same behavior).
            animated = (1 < frameCount);
            if (animated) {
                setFrameRate(frameCount);
                ((SpinnerNumberModel) nudEnvMapFrame.getModel()).setMaximum(frameCount);
            }

            // Use short-circuit && instead of the non-short-circuit & operator.
            nudFrameRate.setEnabled(animated && !chkLockFrameRate.isSelected());
            nudEnvMapFrame.setEnabled(animated && !chkLockEnvMapFrame.isSelected());

            chkLockFrameRate.setEnabled(animated);
            chkLockEnvMapFrame.setEnabled(animated);

        } catch (FileNotFoundException ex) {
            logger.log(Level.SEVERE, null, ex);
        } catch (IOException ex) {
            logger.log(Level.SEVERE, null, ex);
        }
    }
}