Example usage for org.apache.hadoop.io IOUtils closeStream

List of usage examples for org.apache.hadoop.io IOUtils closeStream

Introduction

On this page you can find example usage for org.apache.hadoop.io IOUtils closeStream.

Prototype

public static void closeStream(java.io.Closeable stream) 

Document

Closes the stream, ignoring any Throwable.

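Before the usage examples below, here is a minimal sketch of the typical pattern: closeStream is called from a finally block so that a failing close() cannot mask the exception that originally fired. The class name and the local/HDFS paths in this sketch are hypothetical, chosen only for illustration.

import java.io.FileInputStream;
import java.io.InputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class CloseStreamExample {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);

        InputStream in = null;
        FSDataOutputStream out = null;
        try {
            in = new FileInputStream("example.txt");           // hypothetical local file
            out = fs.create(new Path("/tmp/example.txt"));     // hypothetical HDFS path
            IOUtils.copyBytes(in, out, 4096, false);           // copy without auto-closing
        } finally {
            // closeStream() is null-safe and swallows any Throwable from close(),
            // so both calls are safe even if the try block failed part-way through.
            IOUtils.closeStream(in);
            IOUtils.closeStream(out);
        }
    }
}
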
Usage

From source file:com.endgame.binarypig.loaders.av.ClamScanDaemonLoader.java

License:Apache License

@Override
public Tuple processFile(Text key, BytesWritable value, File binaryFile) throws IOException {
    boolean timedOut = false;
    Socket sock = null;
    BufferedReader in = null;
    OutputStream out = null;
    String result = "";
    try {
        sock = new Socket();
        if (getTimeoutMS() < (long) Integer.MAX_VALUE) {
            sock.setSoTimeout((int) getTimeoutMS());
        }

        sock.connect(clamdEndoint);

        out = sock.getOutputStream();
        in = new BufferedReader(new InputStreamReader(sock.getInputStream()));

        out.write(("nSCAN " + binaryFile.getAbsolutePath() + "\n").getBytes());
        String data = in.readLine();
        if (data != null) {
            result = data.substring(data.indexOf(':') + 1). // "remove the /path/to/file: "
                    replace(" FOUND", ""). // no need for the "FOUND" string
                    replaceAll("\\([a-f0-9:]+\\)$", "").trim(); // on some versions of clamscan, it adds (MD5:NUM)
        }
    } catch (SocketTimeoutException e) {
        result = "";
        timedOut = true;
    } finally {
        IOUtils.closeSocket(sock);
        IOUtils.closeStream(in);
        IOUtils.closeStream(out);
    }

    getProtoTuple().clear();
    getProtoTuple().add(key.toString());
    getProtoTuple().add(timedOut);
    getProtoTuple().add(result);
    return getTupleFactory().newTuple(getProtoTuple());
}

From source file:com.endgame.binarypig.loaders.TextDaemonLoader.java

License:Apache License

private void close() {
    IOUtils.closeSocket(sock);
    IOUtils.closeStream(in);
    IOUtils.closeStream(out);
}

From source file:com.endgame.binarypig.util.ProgramExector.java

License:Apache License

public void closeStreams() {
    IOUtils.closeStream(getStderr());
    IOUtils.closeStream(getStdin());
    IOUtils.closeStream(getStdout());
}

From source file:com.endgame.binarypig.util.Server.java

License:Apache License

public void run() {
    Socket client = null;
    BufferedReader in = null;
    PrintWriter out = null;

    try {
        client = sock.accept();
        if (sleepMS > 0) {
            try {
                Thread.sleep(sleepMS);
            } catch (InterruptedException e) {
                // interruption just cuts the simulated delay short; nothing to do
            }
        }

        in = new BufferedReader(new InputStreamReader(client.getInputStream()));
        String line = in.readLine();
        sent = line;

        out = new PrintWriter(client.getOutputStream());
        out.println(reply);
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        IOUtils.closeStream(out);
        IOUtils.closeStream(in);
        IOUtils.closeStream(sock);
        IOUtils.closeStream(client);
    }
}

From source file:com.esri.geoevent.transport.hdfs.HDFSConnection.java

License:Apache License

public void close() {
    if (fsDataOutputStream != null)
        IOUtils.closeStream(fsDataOutputStream);
    fsDataOutputStream = null;
}

From source file:com.fanlehai.hadoop.serialize.json.multiline.ExampleJob.java

License:Apache License

/**
 * Writes the contents of {@link #JSON} into a file in the job input
 * directory in HDFS.
 *
 * @param conf
 *            the Hadoop config
 * @param inputDir
 *            the HDFS input directory where we'll write a file
 * @throws IOException
 *             if something goes wrong
 */
public static void writeInput(Configuration conf, Path inputDir) throws IOException {
    FileSystem fs = FileSystem.get(conf);

    if (fs.exists(inputDir)) {
        // throw new IOException(String.format(
        //         "Input directory '%s' exists - please remove and rerun this example", inputDir));
        fs.delete(inputDir, true);
    }

    OutputStreamWriter writer = new OutputStreamWriter(fs.create(new Path(inputDir, "input.txt")));
    writer.write(JSON);
    IOUtils.closeStream(writer);
}

From source file:com.flipkart.fdp.migration.distcp.codec.GenericHadoopCodec.java

License:Apache License

public void close() throws IOException {
    IOUtils.closeStream(fs);
}

From source file:com.flipkart.fdp.migration.distcp.core.MirrorFileRecordReader.java

License:Apache License

public void closeStreams() throws IOException {

    IOUtils.closeStream(in);
    IOUtils.closeStream(out);

    if (status.getStatus() == Status.COMPLETED) {

        outCodec.renameFile(status.getOutputPath() + DCMConstants.DCM_TEMP_EXTENSION, status.getOutputPath());

        if (dcmConfig.getSourceConfig().isDeleteSource()) {
            try {
                inCodec.deleteSoureFile(srcPath);
            } catch (Exception e) {
                System.err.println("Failed Deleting file: " + srcPath + ", Exception: " + e.getMessage());
            }
        }
    }
}

From source file:com.flipkart.fdp.migration.distcp.core.MirrorFileRecordReader.java

License:Apache License

@Override
public void close() throws IOException {

    System.out.println("Transfer Complete...");
    IOUtils.closeStream(inCodec);
    IOUtils.closeStream(outCodec);
    IOUtils.closeStream(stateManager);
}

From source file:com.flipkart.fdp.migration.distcp.state.HDFSStateManager.java

License:Apache License

@Override
public void close() throws IOException {
    if (statusWriter != null) {
        IOUtils.closeStream(statusWriter);
        statusWriter = null;
    }
}