Example usage for com.google.common.io Closeables close

List of usage examples for com.google.common.io Closeables close

Introduction

On this page you can find example usages of com.google.common.io Closeables close.

Prototype

public static void close(@Nullable Closeable closeable, boolean swallowIOException) throws IOException 

Source Link

Document

Closes a Closeable, with control over whether an IOException may be thrown.

Usage

From source file:org.qcri.pca.PCACommon.java

/**
 * Reads the first (and only expected) vector record from the given sequence file
 * and returns it as a DenseVector.
 *
 * @param vectorFile path to a SequenceFile of IntWritable/VectorWritable pairs
 * @param conf Hadoop configuration used to open the file
 * @throws IOException if the file cannot be read or closed
 */
static DenseVector toDenseVector(Path vectorFile, Configuration conf) throws IOException {
    SequenceFileIterator<IntWritable, VectorWritable> seqIterator =
            new SequenceFileIterator<IntWritable, VectorWritable>(vectorFile, true, conf);
    try {
        // Only the first record is consumed; the file is expected to hold one vector.
        Pair<IntWritable, VectorWritable> firstRecord = seqIterator.next();
        return new DenseVector(firstRecord.getSecond().get());
    } finally {
        // swallowIOException = false: a failure to close surfaces to the caller.
        Closeables.close(seqIterator, false);
    }
}

From source file:eu.interedition.web.index.IndexController.java

/**
 * Rebuilds and re-indexes the Lucene document for the text referenced by the
 * given Dublin Core metadata, then commits the index.
 *
 * @param metadata metadata whose text id selects the record to (re)index
 * @throws IOException if reading the text content or updating the index fails
 */
public void update(DublinCoreMetadata metadata) throws IOException {
    final Text text = (Text) sessionFactory.getCurrentSession().load(Text.class, metadata.getText());
    Reader contentReader = null;
    try {
        final Document indexDocument = new Document();
        indexDocument.add(new Field("id", Long.toString(text.getId()), Field.Store.YES, Field.Index.NOT_ANALYZED));
        indexDocument.add(new Field("type", text.getType().toString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
        indexDocument.add(new Field("content_length", Long.toString(text.getLength()), Field.Store.YES,
                Field.Index.NOT_ANALYZED));
        // The content field streams from the reader; keep a reference so it can be closed.
        contentReader = text.read().getInput();
        indexDocument.add(new Field("content", contentReader));

        metadata.addTo(indexDocument);

        indexWriter.updateDocument(idTerm(metadata), indexDocument);
        commit();
    } finally {
        // swallowIOException = false: close failures propagate as IOException.
        Closeables.close(contentReader, false);
    }
}

From source file:org.apache.mahout.math.hadoop.MathHelper.java

/**
 * Writes a two-dimensional double array to a SequenceFile&lt;IntWritable, VectorWritable&gt;,
 * one sparse row vector per record, keyed by the row index.
 *
 * @param entries matrix values, one inner array per row
 * @param fs file system to write to
 * @param conf Hadoop configuration for the writer
 * @param path destination path of the sequence file
 * @throws IOException if writing or closing the file fails
 */
public static void writeDistributedRowMatrix(double[][] entries, FileSystem fs, Configuration conf, Path path)
        throws IOException {
    SequenceFile.Writer writer = null;
    try {
        writer = new SequenceFile.Writer(fs, conf, path, IntWritable.class, VectorWritable.class);
        int rowIndex = 0;
        for (double[] row : entries) {
            Vector rowVector = new RandomAccessSparseVector(row.length);
            for (int col = 0; col < row.length; col++) {
                rowVector.setQuick(col, row[col]);
            }
            writer.append(new IntWritable(rowIndex++), new VectorWritable(rowVector));
        }
    } finally {
        // swallowIOException = false: a close failure is reported to the caller.
        Closeables.close(writer, false);
    }
}

From source file:org.kitesdk.data.spi.filesystem.SchemaManager.java

/**
 * Reads and parses the Avro schema stored at the given path.
 *
 * @param schemaPath location of the schema file on the root file system
 * @return the parsed schema
 * @throws DatasetIOException if the schema cannot be read or the stream cannot be closed
 */
private Schema loadSchema(Path schemaPath) {
    InputStream schemaStream = null;
    boolean readFailed = true;
    Schema parsed = null;

    try {
        schemaStream = rootFileSystem.open(schemaPath);
        parsed = new Schema.Parser().parse(schemaStream);
        readFailed = false;
    } catch (IOException e) {
        throw new DatasetIOException("Unable to load schema file:" + schemaPath, e);
    } finally {
        try {
            // Swallow the close-time IOException only when the read itself already
            // failed, so the primary failure is not masked by a close failure.
            Closeables.close(schemaStream, readFailed);
        } catch (IOException e) {
            throw new DatasetIOException("Cannot close", e);
        }
    }

    return parsed;
}

From source file:eu.interedition.text.rdbms.RelationalTextRepository.java

/**
 * Streams the requested range of the text's content to the consumer, reading it
 * from the backing CLOB via the outer read(ReaderCallback) overload.
 */
public void read(Text text, final Range range, final TextConsumer consumer) throws IOException {
    read(new ReaderCallback<Void>(text) {

        @Override
        protected Void read(Clob content) throws SQLException, IOException {
            Reader contentReader = null;
            try {
                // Wrap the CLOB's character stream so the consumer only sees the range.
                consumer.read(contentReader = new RangeFilteringReader(content.getCharacterStream(), range),
                        range.length());
            } catch (IOException e) {
                // NOTE(review): Throwables.propagate(e) always throws, so the IOException is
                // rethrown as-is; the idiomatic form is "throw Throwables.propagate(e)".
                Throwables.propagate(e);
            } finally {
                // swallowIOException = false: close failures propagate to the caller.
                Closeables.close(contentReader, false);
            }
            return null;
        }
    });
}

From source file:io.druid.segment.data.InMemoryCompressedLongs.java

/**
 * Releases the underlying compressed-data holder, if any.
 *
 * @throws IOException if closing the holder fails
 */
@Override
public void close() throws IOException {
    // Equivalent to Closeables.close(holder, false): a null holder is a no-op,
    // and any IOException from close() is rethrown rather than swallowed.
    if (holder != null) {
        holder.close();
    }
}

From source file:JavaCustomReceiver.java

/** Create a socket connection and receive data until receiver is stopped */
private void receive() {
    Socket socket = null;
    BufferedReader reader = null;
    try {
        try {
            // Connect to the server and read line-by-line until stopped or EOF.
            socket = new Socket(host, port);
            reader = new BufferedReader(
                    new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8));
            String line = null;
            while (!isStopped() && (line = reader.readLine()) != null) {
                System.out.println("Received data '" + line + "'");
                store(line);
            }
        } finally {
            // Quietly release both resources; close failures must not mask receive errors.
            Closeables.close(reader, /* swallowIOException = */ true);
            Closeables.close(socket, /* swallowIOException = */ true);
        }
        // Connection ended or receiver stopped: schedule an attempt to reconnect
        // when the server becomes active again.
        restart("Trying to connect again");
    } catch (ConnectException ce) {
        // Restart if we could not connect to the server at all.
        restart("Could not connect", ce);
    } catch (Throwable t) {
        restart("Error receiving data", t);
    }
}

From source file:org.apache.hama.ml.ann.NeuralNetwork.java

/**
 * Write the model data to specified location.
 * /*from w  w w. ja  v a  2  s  .c o  m*/
 * @throws IOException
 */
public void writeModelToFile() throws IOException {
    Preconditions.checkArgument(this.modelPath != null, "Model path has not been set.");
    Configuration conf = new Configuration();
    FSDataOutputStream is = null;
    try {
        URI uri = new URI(this.modelPath);
        FileSystem fs = FileSystem.get(uri, conf);
        is = fs.create(new Path(this.modelPath), true);
        this.write(is);
    } catch (URISyntaxException e) {
        e.printStackTrace();
    }

    Closeables.close(is, false);
}

From source file:org.nll.hbase.ui.util.HbaseUtil.java

/**
 * Scans an HBase table according to the given query and materializes each
 * matching row as an HbaseData (row key plus a qualifier-keyed map of cells).
 *
 * @param connection shared HBase connection used to obtain the table
 * @param query describes table name, families, row-key bounds, prefix, and page size
 * @return the rows returned by the scan, in scan order
 * @throws Exception if the table cannot be opened or the scan fails
 */
public static List<HbaseData> scan(HConnection connection, HbaseQuery query) throws Exception {
    List<HbaseData> datas = Lists.newLinkedList();
    List<Filter> listForFilters = Lists.newArrayList();

    Scan scan = new Scan();
    // Optional prefix match on the row key.
    if (StringUtils.isNotNullOrEmpty(query.getPrefixRowkey())) {
        listForFilters.add(new PrefixFilter(Bytes.toBytes(query.getPrefixRowkey())));
    }
    // FIXME set 10 but return 20 -- presumably because PageFilter limits rows per
    // region server rather than globally, so the total can exceed pageSize; confirm
    // and cap the result client-side if an exact page size is required.
    PageFilter pageFilter = new PageFilter(query.getPageSize());
    listForFilters.add(pageFilter);
    // A single filter is set directly; multiple filters are ANDed together.
    if (listForFilters.size() == 1) {
        scan.setFilter(listForFilters.get(0));
    } else {
        Filter filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL, listForFilters);
        scan.setFilter(filterList);
    }
    // Optional start/stop row keys bound the scan range.
    if (StringUtils.isNotNullOrEmpty(query.getStartRowkey())) {
        scan.setStartRow(Bytes.toBytes(query.getStartRowkey()));
    }
    if (StringUtils.isNotNullOrEmpty(query.getStopRowkey())) {
        scan.setStopRow(Bytes.toBytes(query.getStopRowkey()));
    }
    for (String family : query.getFamilies()) {
        scan.addFamily(Bytes.toBytes(family));
    }
    // Fetch rows from the server in batches of 100 to reduce round trips.
    scan.setCaching(100);
    ResultScanner rs = null;
    HTableInterface table = null;
    try {
        table = getTable(connection, query.getTableName());
        rs = table.getScanner(scan);
        int count = 0;
        for (Result r : rs) {
            count++;
            HbaseData data = new HbaseData();
            data.setRowkey(Bytes.toString(r.getRow()));
            // Insertion-ordered map keyed by qualifier only; if the same qualifier
            // appears in more than one family, the later cell overwrites the earlier.
            Map<String, FamilyData> dataValues = Maps.newLinkedHashMap();
            for (KeyValue kv : r.list()) {
                FamilyData familyData = new FamilyData();
                String family = Bytes.toString(kv.getFamily());
                String key = Bytes.toString(kv.getQualifier());
                String value = Bytes.toString(kv.getValue());
                familyData.setFamilyName(family);
                familyData.setKey(key);
                familyData.setValue(value);
                dataValues.put(key, familyData);
            }
            data.setDatas(dataValues);
            datas.add(data);
        }
        logger.info("hbase return data size:{}", count);
    } finally {
        // Swallow close failures: the results have already been collected.
        Closeables.close(rs, true);
        Closeables.close(table, true);
    }
    return datas;
}

From source file:org.apache.mahout.math.hadoop.decomposer.HdfsBackedLanczosState.java

/**
 * Persists a single vector under the given key to a sequence file at {@code p},
 * replacing any file already present at that location.
 *
 * @param p destination path
 * @param key record key written as an IntWritable
 * @param vector vector payload written as a VectorWritable
 * @throws IOException if the file cannot be written or closed
 */
protected void persistVector(Path p, int key, Vector vector) throws IOException {
    // Remove a pre-existing file so the new record fully replaces it.
    if (fs.exists(p)) {
        log.warn("{} exists, will overwrite", p);
        fs.delete(p, true);
    }
    SequenceFile.Writer seqWriter = null;
    try {
        seqWriter = new SequenceFile.Writer(fs, conf, p, IntWritable.class, VectorWritable.class);
        seqWriter.append(new IntWritable(key), new VectorWritable(vector));
    } finally {
        // swallowIOException = false: a close failure is reported to the caller.
        Closeables.close(seqWriter, false);
    }
}