Example usage for java.io DataInputStream close

List of usage examples for java.io DataInputStream close

Introduction

On this page you can find usage examples for the java.io DataInputStream close() method.

Prototype

public void close() throws IOException 

Document

Closes this input stream and releases any system resources associated with the stream.
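
Because close() releases the underlying file descriptor, the try-with-resources statement is the usual way to guarantee that it is called even when an exception is thrown. The following is a minimal sketch; the file name "data.bin" is only a placeholder.

import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;

public class DataInputStreamCloseExample {
    public static void main(String[] args) throws IOException {
        // try-with-resources invokes close() automatically, even if readInt() throws;
        // closing the DataInputStream also closes the wrapped FileInputStream.
        try (DataInputStream in = new DataInputStream(new FileInputStream("data.bin"))) {
            int value = in.readInt(); // reads four bytes as a big-endian int
            System.out.println("first int = " + value);
        }
    }
}

Note that close() propagates down a stream decorator chain, so closing only the outermost wrapper is sufficient; some of the examples below rely on this, while others close every layer explicitly.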

Usage

From source file:net.modsec.ms.connector.ConnRequestHandler.java

/**
 * Writes the modified modsecurity configurations to the configuration file.
 * @param json contains modsecurity configurations as a json object.
 */
@SuppressWarnings("unchecked")
public static void onWriteMSConfig(JSONObject json) {

    log.info("onWriteMSConfig called.. : " + json.toJSONString());

    MSConfig serviceCfg = MSConfig.getInstance();
    JSONObject jsonResp = new JSONObject();
    String fileName = serviceCfg.getConfigMap().get("MSConfigFile");
    String modifiedStr = "";

    InputStream ins = null;
    FileOutputStream out = null;
    BufferedReader br = null;

    try {

        File file = new File(fileName);
        DataInputStream in;

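        // channel.lock() acquires an exclusive lock on the whole file, so a
        // concurrent writer cannot modify the configuration while it is rewritten.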
        @SuppressWarnings("resource")
        FileChannel channel = new RandomAccessFile(file, "rw").getChannel();
        FileLock lock = channel.lock();

        try {

            ins = new FileInputStream(file);
            in = new DataInputStream(ins);
            br = new BufferedReader(new InputStreamReader(in));

            String line = "";
            boolean check;

            while ((line = br.readLine()) != null) {

                check = true;
                //log.info("Line :" + line);
                for (ModSecConfigFields field : ModSecConfigFields.values()) {

                    if (line.startsWith(field.toString())) {
                        if (line.trim().split(" ")[0].equals(field.toString())) {
                            if (json.containsKey(field.toString())) {

                                if (json.get(field.toString()) == null
                                        || ((String) json.get(field.toString())).equals("")) {

                                    log.info("---------- Log Empty value ----:"
                                            + (String) json.get(field.toString()));
                                    json.remove(field.toString());
                                    check = false;
                                    continue;

                                } else {

                                    modifiedStr += field.toString() + " " + json.remove(field.toString())
                                            + "\n";
                                    check = false;

                                }

                            }
                        }
                    }

                }

                if (check) {
                    modifiedStr += line + "\n";
                }

            }

            for (ModSecConfigFields field : ModSecConfigFields.values()) {
                if (json.containsKey(field.toString())) {

                    if (json.get(field.toString()) == null
                            || ((String) json.get(field.toString())).equals("")) {

                        log.info("---------- Log Empty value ----:" + (String) json.get(field.toString()));
                        json.remove(field.toString());
                        continue;

                    } else {
                        modifiedStr += field.toString() + " " + json.remove(field.toString()) + "\n";
                    }

                }

            }

            //modified string writing to modsecurity configurations
            log.info("Writing File :" + modifiedStr);
            out = new FileOutputStream(fileName);
            out.write(modifiedStr.getBytes());

            log.info("ModSecurity Configurations configurations Written ... ");

        } finally {

            lock.release();

        }

        br.close();
        in.close();
        ins.close();
        out.close();

        //For Restarting modsecurity so that modified configuration can be applied
        JSONObject restartJson = new JSONObject();
        restartJson.put("action", "restart");

        String cmd = serviceCfg.getConfigMap().get("MSRestart");

        executeShScript(cmd, restartJson);

        jsonResp.put("action", "writeMSConfig");
        jsonResp.put("status", "0");
        jsonResp.put("message", "Configurations updated!");

    } catch (FileNotFoundException e1) {

        jsonResp.put("action", "writeMSConfig");
        jsonResp.put("status", "1");
        jsonResp.put("message", "Internal Service is down!");
        e1.printStackTrace();

    } catch (IOException | NullPointerException e) {

        jsonResp.put("action", "writeMSConfig");
        jsonResp.put("status", "0");
        jsonResp.put("message", "Unable to modify configurations. Sorry of inconvenience");
        e.printStackTrace();

    }

    log.info("Sending Json :" + jsonResp.toJSONString());
    ConnectorService.getConnectorProducer().send(jsonResp.toJSONString());
    jsonResp.clear();
}

From source file:org.opendedup.sdfs.filestore.cloud.BatchAwsS3ChunkStore.java

public StringResult getStringResult(String key) throws IOException, InterruptedException {
    this.s3clientLock.readLock().lock();
    S3Object sobj = null;
    try {

        ObjectMetadata md = null;
        try {
            sobj = s3Service.getObject(getName(), key);
            md = s3Service.getObjectMetadata(this.name, key);
        } catch (Exception e) {
            throw new IOException(e);
        }
        int cl = (int) md.getContentLength();

        byte[] data = new byte[cl];
        DataInputStream in = null;
        try {
            in = new DataInputStream(sobj.getObjectContent());
            in.readFully(data);

        } catch (Exception e) {
            throw new IOException(e);
        } finally {
            if (in != null)
                in.close();
        }
        boolean encrypt = false;
        boolean compress = false;
        boolean lz4compress = false;
        Map<String, String> mp = this.getUserMetaData(md);
        byte[] ivb = null;
        if (mp.containsKey("ivspec")) {
            ivb = BaseEncoding.base64().decode(mp.get("ivspec"));
        }
        if (mp.containsKey("md5sum")) {
            try {
                byte[] shash = BaseEncoding.base64().decode(mp.get("md5sum"));
                byte[] chash = ServiceUtils.computeMD5Hash(data);
                if (!Arrays.equals(shash, chash))
                    throw new IOException("download corrupt at " + sobj.getKey());
            } catch (NoSuchAlgorithmException e) {
                throw new IOException(e);
            }
        }
        int size = Integer.parseInt(mp.get("size"));
        encrypt = Boolean.parseBoolean(mp.get("encrypt"));

        lz4compress = Boolean.parseBoolean(mp.get("lz4compress"));
        boolean changed = false;

        Long hid = EncyptUtils.decHashArchiveName(sobj.getKey().substring(5), encrypt);
        if (this.clustered)
            mp = s3Service.getObjectMetadata(this.name, this.getClaimName(hid)).getUserMetadata();
        if (mp.containsKey("deleted")) {
            mp.remove("deleted");
            changed = true;
        }
        if (mp.containsKey("deleted-objects")) {
            mp.remove("deleted-objects");
            changed = true;
        }

        if (encrypt) {

            if (ivb != null) {
                data = EncryptUtils.decryptCBC(data, new IvParameterSpec(ivb));
            } else {
                data = EncryptUtils.decryptCBC(data);
            }
        }
        if (compress)
            data = CompressionUtils.decompressZLIB(data);
        else if (lz4compress) {
            data = CompressionUtils.decompressLz4(data, size);
        }

        String hast = new String(data);
        SDFSLogger.getLog().debug("reading hashes " + (String) mp.get("objects") + " from " + hid + " encn "
                + sobj.getKey().substring(5));
        StringTokenizer ht = new StringTokenizer(hast, ",");
        StringResult st = new StringResult();
        st.id = hid;
        st.st = ht;
        if (mp.containsKey("bsize")) {
            HashBlobArchive.currentLength.addAndGet(Integer.parseInt(mp.get("bsize")));
        }
        if (mp.containsKey("bcompressedsize")) {
            HashBlobArchive.compressedLength.addAndGet(Integer.parseInt(mp.get("bcompressedsize")));
        }
        if (changed) {
            try {
                md = sobj.getObjectMetadata();
                md.setUserMetadata(mp);
                String kn = null;
                if (this.clustered)
                    kn = this.getClaimName(hid);
                else
                    kn = sobj.getKey();

                this.updateObject(kn, md);
            } catch (Exception e) {
                throw new IOException(e);
            }
        }
        return st;
    } finally {
        if (sobj != null)
            sobj.close();
        this.s3clientLock.readLock().unlock();
    }
}

From source file:com.panet.imeta.trans.steps.blockingstep.BlockingStep.java

private Object[] getBuffer() {
    Object[] retval;

    // Open all files at once and read one row from each file...
    if (data.files.size() > 0 && (data.dis.size() == 0 || data.fis.size() == 0)) {
        if (log.isBasic())
            logBasic(Messages.getString("BlockingStep.Log.Openfiles"));

        try {
            FileObject fileObject = (FileObject) data.files.get(0);
            String filename = KettleVFS.getFilename(fileObject);
            if (log.isDetailed())
                logDetailed(Messages.getString("BlockingStep.Log.Openfilename1") + filename
                        + Messages.getString("BlockingStep.Log.Openfilename2"));
            InputStream fi = KettleVFS.getInputStream(fileObject);
            DataInputStream di;
            data.fis.add(fi);
            if (meta.getCompress()) {
                GZIPInputStream gzfi = new GZIPInputStream(new BufferedInputStream(fi));
                di = new DataInputStream(gzfi);
                data.gzis.add(gzfi);
            } else {
                di = new DataInputStream(fi);
            }
            data.dis.add(di);

            // How long is the buffer?
            int buffersize = di.readInt();

            if (log.isDetailed())
                logDetailed(Messages.getString("BlockingStep.Log.BufferSize1") + filename
                        + Messages.getString("BlockingStep.Log.BufferSize2") + buffersize + " "
                        + Messages.getString("BlockingStep.Log.BufferSize3"));

            if (buffersize > 0) {
                // Read a row from temp-file
                data.rowbuffer.add(data.outputRowMeta.readData(di));
            }
        } catch (Exception e) {
            logError(Messages.getString("BlockingStepMeta.ErrorReadingFile") + e.toString());
            logError(Const.getStackTracker(e));
        }
    }

    if (data.files.size() == 0) {
        if (data.buffer.size() > 0) {
            retval = (Object[]) data.buffer.get(0);
            data.buffer.remove(0);
        } else {
            retval = null;
        }
    } else {
        if (data.rowbuffer.size() == 0) {
            retval = null;
        } else {
            retval = (Object[]) data.rowbuffer.get(0);

            data.rowbuffer.remove(0);

            // now get another 
            FileObject file = (FileObject) data.files.get(0);
            DataInputStream di = (DataInputStream) data.dis.get(0);
            InputStream fi = (InputStream) data.fis.get(0);
            GZIPInputStream gzfi = (meta.getCompress()) ? (GZIPInputStream) data.gzis.get(0) : null;

            try {
                data.rowbuffer.add(0, data.outputRowMeta.readData(di));
            } catch (SocketTimeoutException e) {
                logError(Messages.getString("System.Log.UnexpectedError") + " : " + e.toString()); //$NON-NLS-1$ //$NON-NLS-2$
                logError(Const.getStackTracker(e));
                setErrors(1);
                stopAll();
            } catch (KettleFileException fe) // empty file or EOF mostly
            {
                try {
                    di.close();
                    fi.close();
                    if (gzfi != null)
                        gzfi.close();
                    file.delete();
                } catch (IOException e) {
                    logError(Messages.getString("BlockingStepMeta.UnableDeleteFile") + file.toString());
                    setErrors(1);
                    stopAll();
                    return null;
                }

                data.files.remove(0);
                data.dis.remove(0);
                data.fis.remove(0);
                if (gzfi != null)
                    data.gzis.remove(0);
            }
        }
    }
    return retval;
}

From source file:net.timewalker.ffmq4.storage.data.impl.journal.JournalRecovery.java

private int recoverFromJournalFile(File journalFile) throws JournalException {
    log.debug("[" + baseName + "] Processing " + journalFile.getAbsolutePath());

    DataInputStream in;
    try {
        // Create a buffered data input stream from file
        in = new DataInputStream(new BufferedInputStream(new FileInputStream(journalFile)));
    } catch (IOException e) {
        throw new JournalException("Cannot open journal file : " + journalFile.getAbsolutePath(), e);
    }

    int replayedOperations = 0;
    int replayedTransactions = 0;
    long currentTransactionId = -1;
    int newBlockCount = -1;
    LinkedList<AbstractJournalOperation> transactionQueue = new LinkedList<>();
    try {
        AbstractJournalOperation op;
        while ((op = readJournalOperation(in)) != null) {
            // Check transaction id
            if (currentTransactionId == -1)
                currentTransactionId = op.getTransactionId();
            else if (currentTransactionId != op.getTransactionId())
                throw new IllegalStateException("Transaction id inconsistency : " + currentTransactionId
                        + " -> " + op.getTransactionId());

            if (op instanceof CommitOperation) {
                // Check transaction size
                int opCount = ((CommitOperation) op).getOperationsCount();
                if (transactionQueue.size() != opCount) {
                    throw new IllegalStateException("Transaction size mismatch (expected " + opCount + ", got "
                            + transactionQueue.size() + ")");
                } else {
                    // Everything looks fine, proceed ...
                    log.trace("[" + baseName + "] Replaying transaction #" + currentTransactionId + " ("
                            + transactionQueue.size() + " operation(s))");
                    replayedOperations += transactionQueue.size();
                    replayedTransactions++;
                    newBlockCount = applyOperations(transactionQueue);
                    currentTransactionId = -1;
                }
            } else
                transactionQueue.addLast(op);
        }

        if (transactionQueue.size() > 0) {
            op = transactionQueue.removeFirst();
            log.warn("[" + baseName + "] Dropping incomplete transaction : #" + op.getTransactionId());
        }

        syncStore();

        log.warn("[" + baseName + "] Recovery complete. (Replayed " + replayedTransactions
                + " transaction(s) and " + replayedOperations + " operation(s))");
    } finally {
        try {
            in.close();
        } catch (IOException e) {
            throw new JournalException("Cannot close journal file : " + journalFile.getAbsolutePath(), e);
        }
    }

    return newBlockCount;
}

From source file:org.apache.hadoop.hive.sql.QTestUtil.java

public void addFile(File qf) throws Exception {

    FileInputStream fis = new FileInputStream(qf);
    BufferedInputStream bis = new BufferedInputStream(fis);
    DataInputStream dis = new DataInputStream(bis);
    StringBuilder qsb = new StringBuilder();

    // Look for a hint to not run a test on some Hadoop versions
    Pattern pattern = Pattern.compile("-- EXCLUDE_HADOOP_MAJOR_VERSIONS(.*)");

    // Read the entire query
    boolean excludeQuery = false;
    String hadoopVer = ShimLoader.getMajorVersion();
    while (dis.available() != 0) {
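        // DataInputStream.readLine() is deprecated (it does not convert bytes to
        // characters correctly); BufferedReader.readLine() is the usual replacement.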
        String line = dis.readLine();

        // While we are reading the lines, detect whether this query wants to be
        // excluded from running because the Hadoop version is incorrect
        Matcher matcher = pattern.matcher(line);
        if (matcher.find()) {
            String group = matcher.group();
            int start = group.indexOf('(');
            int end = group.indexOf(')');
            assert end > start;
            // versions might be something like '0.17, 0.19'
            String versions = group.substring(start + 1, end);

            Set<String> excludedVersionSet = new HashSet<String>();
            for (String s : versions.split("\\,")) {
                s = s.trim();
                excludedVersionSet.add(s);
            }
            if (excludedVersionSet.contains(hadoopVer)) {
                excludeQuery = true;
            }
        }
        qsb.append(line + "\n");
    }
    qMap.put(qf.getName(), qsb.toString());
    if (excludeQuery) {
        System.out.println("Due to the Hadoop Version (" + hadoopVer + "), " + "adding query " + qf.getName()
                + " to the set of tests to skip");
        qSkipSet.add(qf.getName());
    }
    dis.close();
}

From source file:Util.PacketGenerator.java

public void GenerateGraphGnuplot() {
    try {
        for (int j = 6; j <= 6; j++) {
            File real = new File("D:\\Mestrado\\SketchMatrix\\trunk\\Simulations\\Analise\\Scenario1\\Topology"
                    + j + "\\Real.csv");
            for (int k = 1; k <= 4; k++) {
                File simu = new File(
                        "D:\\Mestrado\\SketchMatrix\\trunk\\Simulations\\Analise\\Scenario1\\Topology" + j
                                + "\\SimulacaoInstancia" + k + ".csv");
                File dat = new File(
                        "D:\\Mestrado\\SketchMatrix\\trunk\\Simulations\\Analise\\Scenario1\\Topology" + j
                                + "\\SimulacaoInstancia" + k + ".txt");

                FileInputStream simuFIS = new FileInputStream(simu);
                DataInputStream simuDIS = new DataInputStream(simuFIS);
                BufferedReader simuBR = new BufferedReader(new InputStreamReader(simuDIS));

                FileInputStream realFIS = new FileInputStream(real);
                DataInputStream realDIS = new DataInputStream(realFIS);
                BufferedReader realBR = new BufferedReader(new InputStreamReader(realDIS));

                PrintWriter datPW = new PrintWriter(dat);

                String lineSimu = simuBR.readLine();
                String lineReal = realBR.readLine();

                double maxX = 0.0;
                double maxY = 0.0;

                HashMap<Double, Double> map = new HashMap<>();

                while (lineSimu != null && lineReal != null) {

                    lineSimu = lineSimu.replaceAll(",", ".");
                    String[] simuMatriz = lineSimu.split(";");
                    String[] realMatriz = lineReal.split(";");

                    for (int i = 0; i < simuMatriz.length; i++) {
                        try {
                            Integer valorReal = Integer.parseInt(realMatriz[i]);
                            Double valorSimu = Double.parseDouble(simuMatriz[i]);
                            if (map.containsKey(valorReal.doubleValue()) && map.containsValue(valorSimu)) {
                                continue;
                            }
                            map.put(valorReal.doubleValue(), valorSimu);
                            datPW.write(valorReal.doubleValue() + "\t");
                            datPW.write(valorReal.doubleValue() + "\t");
                            datPW.write(valorSimu.doubleValue() + "\t");
                            datPW.write(valorReal.doubleValue() * 1.2 + "\t");
                            datPW.write(valorReal.doubleValue() * 0.8 + "\n");
                            if (valorReal > maxX) {
                                maxX = valorReal;
                            }
                            if (valorSimu > maxY) {
                                maxY = valorSimu;
                            }
                        } catch (NumberFormatException ex) {
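                            // skip cells that are not numeric, e.g. header fields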

                        }
                    }
                    lineSimu = simuBR.readLine();
                    lineReal = realBR.readLine();
                }

                simuFIS.close();
                simuDIS.close();
                simuBR.close();

                realFIS.close();
                realDIS.close();
                realBR.close();

                datPW.close();

                Double max = Math.max(maxX, maxY);
                max *= 1.05;

                Process p = Runtime.getRuntime().exec("cmd");
                new Thread(new SyncPipe(p.getErrorStream(), System.err)).start();
                new Thread(new SyncPipe(p.getInputStream(), System.out)).start();
                PrintWriter stdin = new PrintWriter(p.getOutputStream());
                stdin.println("gnuplot");
                stdin.println("cd 'D:\\Mestrado\\SketchMatrix\\trunk\\Simulations\\Analise\\Scenario1\\Topology"
                        + j + "'");
                stdin.println("set terminal postscript eps enhanced \"Times\" 20");
                stdin.println("set output \"SimulacaoInstancia" + k + ".eps\"");
                stdin.println("unset title");
                stdin.println("unset style line");
                stdin.println("set style line 1 pt 7 lc 7 lw 1");
                stdin.println("set style line 2 lt 1 lc 7 lw 1");
                stdin.println("set style line 3 lt 4 lc 7 lw 1");
                stdin.println("set style line 4 lt 4 lc 7 lw 1");
                stdin.println("set style line 5 lt 5 lc 5 lw 3");
                stdin.println("set style line 6 lt 6 lc 6 lw 3");
                stdin.println("set style line 7 pt 7 lc 7 lw 3");
                if (k == 4) {
                    stdin.println("set ylabel \"CMO-MT\"");
                    stdin.println("set xlabel \"Real\"");
                } else {
                    stdin.println("set ylabel \"Zhao\"");
                    stdin.println("set xlabel \"CMO-MT\"");
                }
                stdin.println("set key top left");
                stdin.println("set xrange [0:" + max.intValue() + "]");
                stdin.println("set yrange [0:" + max.intValue() + "]");
                stdin.println("set grid ytics lc rgb \"#bbbbbb\" lw 1 lt 0");
                stdin.println("set grid xtics lc rgb \"#bbbbbb\" lw 1 lt 0");
                stdin.println("plot " + "x title \"Referencia\"      ls 2," + "\"SimulacaoInstancia" + k
                        + ".txt\" using 1:3 title \"Matriz\"          ls 7,"
                        + "1.2*x title \"Superior 20%\"    ls 4," + "0.8*x title \"Inferior 20%\"    ls 4");

                stdin.println("exit");
                stdin.println("exit");
                // write any other commands you want here
                stdin.close();
                int returnCode = p.waitFor();
                System.out.println("Return code = " + returnCode);
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:org.apache.hadoop.hive.ql.QTestUtil2.java

public void addFile(File qf) throws Exception {

    FileInputStream fis = new FileInputStream(qf);
    BufferedInputStream bis = new BufferedInputStream(fis);
    DataInputStream dis = new DataInputStream(bis);
    StringBuilder qsb = new StringBuilder();

    // Look for a hint to not run a test on some Hadoop versions
    Pattern pattern = Pattern.compile("-- EXCLUDE_HADOOP_MAJOR_VERSIONS(.*)");

    // Read the entire query
    boolean excludeQuery = false;
    String hadoopVer = ShimLoader.getMajorVersion();
    while (dis.available() != 0) {
        String line = dis.readLine();

        // While we are reading the lines, detect whether this query wants to be
        // excluded from running because the Hadoop version is incorrect
        Matcher matcher = pattern.matcher(line);
        if (matcher.find()) {
            String group = matcher.group();
            int start = group.indexOf('(');
            int end = group.indexOf(')');
            assert end > start;
            // versions might be something like '0.17, 0.19'
            String versions = group.substring(start + 1, end);

            Set<String> excludedVersionSet = new HashSet<String>();
            for (String s : versions.split("\\,")) {
                s = s.trim();
                excludedVersionSet.add(s);
            }
            if (excludedVersionSet.contains(hadoopVer)) {
                excludeQuery = true;
            }
        }
        qsb.append(line + "\n");
    }
    qMap.put(qf.getName(), qsb.toString());
    if (excludeQuery) {
        System.out.println("Due to the Hadoop Version (" + hadoopVer + "), " + "adding query " + qf.getName()
                + " to the set of tests to skip");
        qSkipSet.add(qf.getName());
    }
    dis.close();
}

From source file:cc.redberry.core.tensor.BulkTestsForParser.java

private static void testParseRecurrently(File file, Counter containsParseLinesCounter,
        Counter matchedLinesCounter, DescriptiveStatistics statistics) {
    if (file.isFile()) {
        if (file.getName().equals(BulkTestsForParser.class.getSimpleName() + ".java"))
            return;
        if (file.getName().equals(ParserTest.class.getSimpleName() + ".java"))
            return;
        if (file.getName().equals(NumberParserTest.class.getSimpleName() + ".java"))
            return;

        FileInputStream fileInputStream;
        try {
            fileInputStream = new FileInputStream(file);
        } catch (FileNotFoundException e) {
            throw new RuntimeException(e);
        }
        DataInputStream dataInputStream = new DataInputStream(fileInputStream);
        BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(dataInputStream));
        String string;
        try {
            boolean containsParse;
            boolean matchedParse;
            int lineNumber = -1;
            String bufferedString = null;
            while ((string = bufferedReader.readLine()) != null) {
                ++lineNumber;
                if (bufferedString != null) {
                    string = bufferedString + "\n" + string;
                    bufferedString = null;
                }
                matchedParse = false;
                if (string.contains("IndexMappingTestUtils.parse") || string.contains("ParserIndices.parse"))
                    continue;
                containsParse = (string.contains("parse(") || string.contains("parseExpression(")
                        || string.contains("parseSimple(")) && string.contains("\"");
                string = string.trim();
                if (string.length() > 0) {
                    char c = string.charAt(string.length() - 1);
                    if (c == '\"' || c == '+' || c == '(') {
                        bufferedString = string;
                        continue;
                    }
                }

                string = string.replaceAll("\n", "");
                string = string.replaceAll("\"[\\s]*\\+[\\+]*[\\s]*\"", "");
                Matcher matcher = pattern.matcher(string);
                String tensorString;
                Tensor tensor;
                while (matcher.find()) {
                    matchedParse = true;
                    tensorString = matcher.group(2);
                    //if (tensorString.length() > 100)
                    //    System.out.println("\"" + tensorString + "\",");
                    tensorString = tensorString.replace("\\\\", "\\");
                    if (tensorString.contains("\"")) {
                        tensorString = tensorString.split("\"")[0];
                    }

                    try {
                        statistics.addValue(tensorString.length());
                        tensor = Tensors.parse(tensorString);
                        checkTensor(tensor);
                    } catch (AssertionError | RuntimeException e) {

                        System.out.println(e.getClass().getSimpleName() + ":");
                        System.out.println(tensorString);
                        System.out.println(file + "  line: " + lineNumber);
                        System.out.println();
                        throw new RuntimeException(e);
                    }

                }

                if (containsParse && !matchedParse && bufferedString == null) {
                    System.out.println("Parse but not matched:");
                    System.out.println(string);
                    System.out.println(file + "  line: " + lineNumber);
                    System.out.println();
                }
                if (containsParse && bufferedString == null)
                    containsParseLinesCounter.increase();
                if (matchedParse)
                    matchedLinesCounter.increase();
            }
            bufferedReader.close();
            dataInputStream.close();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }

    } else if (file.isDirectory()) {
        File[] listOfFiles = file.listFiles();
        if (listOfFiles != null) {
            for (int i = 0; i < listOfFiles.length; i++)
                testParseRecurrently(listOfFiles[i], containsParseLinesCounter, matchedLinesCounter,
                        statistics);
        }
    } else
        throw new RuntimeException();
}

From source file:com.kdmanalytics.toif.assimilator.Assimilator.java

/**
 * process the kdm files. decides if its xml or triples.
 *
 * @param kdmFiles
 * @throws ToifException
 */
private void processkdm(List<File> kdmFiles) throws ToifException {
    if (kdmFiles.size() == 0) {
        return;
    }

    File kdmFile = kdmFiles.get(0);

    FileInputStream is = null;
    DataInputStream din = null;
    BufferedReader br = null;
    try {
        is = new FileInputStream(kdmFile);
        din = new DataInputStream(is);
        br = new BufferedReader(new InputStreamReader(din));

        String firstLine = br.readLine();

        if (firstLine == null || firstLine.isEmpty()) {
            return;
        } else if (firstLine.startsWith("<?xml")) {
            processKdmXmlFile(kdmFiles);
        } else {
            processKdmFile(kdmFiles);
        }

    } catch (FileNotFoundException e) {
        LOG.error("kdm file not found");
        return;
    } catch (IOException e) {
        LOG.error("error reading kdm file");
        return;
    } catch (RepositoryException e) {
        LOG.error("error accessing repository for writing xml nodes");
        return;
    } finally {
        try {
            if (br != null)
                br.close();

            if (din != null)
                din.close();

            if (is != null)
                is.close();

        } catch (IOException e) {
            LOG.error("Unable to close stream: ", e);
        }

    }

}

From source file:org.apache.hadoop.hdfs.tools.offlineImageViewer.OfflineImageDecompressor.java

/**
 * Process image file.
 */
private void go() throws IOException {
    long start = System.currentTimeMillis();
    System.out.println("Decompressing image file: " + inputFile + " to " + outputFile);
    DataInputStream in = null;
    DataOutputStream out = null;

    try {
        // setup in
        PositionTrackingInputStream ptis = new PositionTrackingInputStream(
                new FileInputStream(new File(inputFile)));
        in = new DataInputStream(ptis);

        // read header information
        int imgVersion = in.readInt();
        if (!LayoutVersion.supports(Feature.FSIMAGE_COMPRESSION, imgVersion)) {
            System.out.println("Image is not compressed. No output will be produced.");
            return;
        }
        int namespaceId = in.readInt();
        long numFiles = in.readLong();
        long genstamp = in.readLong();

        long imgTxId = -1;
        if (LayoutVersion.supports(Feature.STORED_TXIDS, imgVersion)) {
            imgTxId = in.readLong();
        }
        FSImageCompression compression = FSImageCompression.readCompressionHeader(new Configuration(), in);
        if (compression.isNoOpCompression()) {
            System.out.println("Image is not compressed. No output will be produced.");
            return;
        }
        in = BufferedByteInputStream.wrapInputStream(compression.unwrapInputStream(in),
                FSImage.LOAD_SAVE_BUFFER_SIZE, FSImage.LOAD_SAVE_CHUNK_SIZE);
        System.out.println("Starting decompression.");

        // setup output
        out = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(outputFile)));

        // write back the uncompressed information
        out.writeInt(imgVersion);
        out.writeInt(namespaceId);
        out.writeLong(numFiles);
        out.writeLong(genstamp);
        if (LayoutVersion.supports(Feature.STORED_TXIDS, imgVersion)) {
            out.writeLong(imgTxId);
        }
        // no compression
        out.writeBoolean(false);

        // copy the data
        long size = new File(inputFile).length();
        // read in 1MB chunks
        byte[] block = new byte[1024 * 1024];
        while (true) {
            int bytesRead = in.read(block);
            if (bytesRead <= 0)
                break;
            out.write(block, 0, bytesRead);
            printProgress(ptis.getPos(), size);
        }

        out.close();

        long stop = System.currentTimeMillis();
        System.out.println("Input file : " + inputFile + " size: " + size);
        System.out.println("Output file: " + outputFile + " size: " + new File(outputFile).length());
        System.out.println("Decompression completed in " + (stop - start) + " ms.");
    } finally {
        if (in != null)
            in.close();
        if (out != null)
            out.close();
    }
}