Example usage for org.apache.commons.io FileUtils openInputStream

List of usage examples for org.apache.commons.io FileUtils openInputStream

Introduction

On this page you can find usage examples for org.apache.commons.io FileUtils openInputStream.

Prototype

public static FileInputStream openInputStream(File file) throws IOException 

Document

Opens a FileInputStream for the specified file, providing better error messages than simply calling new FileInputStream(file).
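
As a quick orientation before the project examples below, here is a minimal sketch (not taken from any of the listed projects; the class and method names are illustrative) that wraps the returned stream in try-with-resources so the file handle is always released:

import java.io.File;
import java.io.IOException;
import java.io.InputStream;

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;

public class OpenInputStreamSketch {

    public static byte[] readAllBytes(File file) throws IOException {
        // openInputStream reports a descriptive IOException when the file does not exist,
        // is a directory, or cannot be read, instead of a bare FileNotFoundException.
        try (InputStream in = FileUtils.openInputStream(file)) {
            return IOUtils.toByteArray(in);
        }
    }
}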

Usage

From source file:cat.calidos.morfeu.model.injection.URIToParsedModule.java

@Produces
@Named("FetchedRawContent")
public static InputStream fetchedRawContent(@Named("FetchableContentURI") URI uri) throws FetchingException {

    // if uri is absolute we retrieve it, otherwise we assume it's a local relative file

    try {
        if (uri.isAbsolute()) {
            log.info("Fetching absolute content uri '{}' to parse", uri);
            return IOUtils.toInputStream(IOUtils.toString(uri, Config.DEFAULT_CHARSET), Config.DEFAULT_CHARSET);
        } else {
            log.info("Fetching relative content uri '{}' to parse, assuming file", uri);
            return FileUtils.openInputStream(new File(uri.toString()));
        }
    } catch (IOException e) {
        log.error("Could not fetch '{}' ({}", uri, e);
        throw new FetchingException("Problem when fetching '" + uri + "'", e);
    }
}

From source file:com.tascape.reactor.report.MySqlBaseBean.java

public MySqlBaseBean() throws SQLException, IOException {
    Properties p = new Properties();
    LOG.info("load reactor db access info from {}", DB_CONFIG_FILE);
    File f = new File(DB_CONFIG_FILE);
    if (f.exists()) {
        try (InputStream in = FileUtils.openInputStream(f)) {
            p.load(in);
        }
    }
    LOG.debug("{}", p);
    BoneCPConfig connPoolConfig = new BoneCPConfig();
    String url = "jdbc:mysql://" + p.getProperty(DbHandler.SYSPROP_DATABASE_HOST, "127.0.0.1") + "/"
            + p.getProperty(DbHandler.SYSPROP_DATABASE_SCHEMA, "reactor");
    connPoolConfig.setJdbcUrl(url);
    LOG.info("connect to {}", url);
    connPoolConfig.setUsername(p.getProperty(DbHandler.SYSPROP_DATABASE_USER, "reactor"));
    connPoolConfig.setPassword(p.getProperty(DbHandler.SYSPROP_DATABASE_PASS, "p@ssword"));
    connPoolConfig.setMaxConnectionAgeInSeconds(600);
    connPoolConfig.setDefaultAutoCommit(true);
    connPoolConfig.setIdleConnectionTestPeriodInSeconds(30);
    connPoolConfig.setConnectionTestStatement("SELECT 1");
    this.connPool = new BoneCP(connPoolConfig);
    LOG.info("{}", connPool);
}

From source file:net.frogmouth.ddf.jpeginputtransformer.TestJpegInputTransformer.java

@Test()
public void testIPhone() throws IOException, CatalogTransformerException, UnsupportedQueryException,
        SourceUnavailableException, FederationException, ParseException {
    File file = new File(TEST_DATA_PATH + "Apple iPhone 4.jpg");
    FileInputStream fis = FileUtils.openInputStream(file);
    Metacard metacard = createTransformer().transform(fis);

    assertNotNull(metacard);

    assertNotNull(metacard.getCreatedDate());
    assertThat(metacard.getCreatedDate().getYear() + 1900, is(2011));
    assertThat(metacard.getCreatedDate().getMonth() + 1, is(1));
    assertThat(metacard.getCreatedDate().getDate(), is(13));
    assertThat(metacard.getCreatedDate().getHours(), is(14));
    assertThat(metacard.getCreatedDate().getMinutes(), is(33));
    assertThat(metacard.getCreatedDate().getSeconds(), is(39));

    assertEquals(metacard.getCreatedDate(), metacard.getModifiedDate());

    WKTReader reader = new WKTReader();
    Geometry geometry = reader.read(metacard.getLocation());
    assertEquals(12.488833, geometry.getCoordinate().x, 0.00001);
    assertEquals(41.853, geometry.getCoordinate().y, 0.00001);
}

From source file:com.splunk.shuttl.server.mbeans.ShuttlArchiverMBeanArchiverRootURITest.java

private void teardown(String hadoopHost, String hadoopPort, File hdfsPropertiesFile) throws IOException {
    createArchiverMbeanWithArchiverRootURI(createHdfsArchiverRootUri(hadoopHost, hadoopPort));
    Properties hdfsProperties = new Properties();
    hdfsProperties.load(FileUtils.openInputStream(hdfsPropertiesFile));
    assertEquals(hadoopHost, hdfsProperties.getProperty("hadoop.host"));
    assertEquals(hadoopPort, hdfsProperties.getProperty("hadoop.port"));
}

From source file:jenkins.plugins.itemstorage.s3.S3UploadAllCallable.java

/**
 * Upload from slave
 */
@Override
public Integer invoke(final TransferManager transferManager, File base, VirtualChannel channel)
        throws IOException, InterruptedException {
    if (!base.exists())
        return 0;

    final AtomicInteger count = new AtomicInteger(0);
    final Uploads uploads = new Uploads();

    final Map<String, S3ObjectSummary> summaries = lookupExistingCacheEntries(
            transferManager.getAmazonS3Client());

    // Find files to upload that match scan
    scanner.scan(base, new FileVisitor() {
        @Override
        public void visit(File f, String relativePath) throws IOException {
            if (f.isFile()) {
                String key = pathPrefix + "/" + relativePath;

                S3ObjectSummary summary = summaries.get(key);
                if (summary == null || f.lastModified() > summary.getLastModified().getTime()) {
                    final ObjectMetadata metadata = buildMetadata(f);

                    uploads.startUploading(transferManager, f,
                            IOUtils.toBufferedInputStream(FileUtils.openInputStream(f)),
                            new Destination(bucketName, key), metadata);

                    if (uploads.count() > 20) {
                        waitForUploads(count, uploads);
                    }
                }
            }
        }
    });

    // Wait for each file to complete before returning
    waitForUploads(count, uploads);

    return uploads.count();
}

From source file:de.pawlidi.openaletheia.license.LicenseLoaderTest.java

/**
 * Test method for
 * {@link de.pawlidi.aletheia.license.LicenseLoader#load(java.io.InputStream, java.lang.String)}
 * .
 * 
 * @throws IOException
 * @throws LicenseException
 */
@Test
public void testLoadInputStreamString() throws IOException, LicenseException {
    InputStream inputStream = FileUtils.openInputStream(new File("test.license"));
    properties = licenseLoader.load(inputStream);
    assertNotNull(properties);
}

From source file:gov.nih.nci.caarray.application.fileaccess.FileAccessUtils.java

/**
 * Adds the given file to the given zip output stream using the given name as the zip entry name. This method will
 * NOT call finish on the zip output stream at the end.
 *
 * @param zos the zip output stream to add the file to. This stream must already be open.
 * @param file the file to put in the zip.
 * @param name the name to use for this zip entry.
 * @param addAsStored if true, then the file will be added to the zip as a STORED entry (e.g. without applying
 *            compression to it); if false, then the file will be added to the zip as a DEFLATED entry.
 * @throws IOException if there is an error writing to the stream
 */
public void writeZipEntry(ZipOutputStream zos, File file, String name, boolean addAsStored) throws IOException {
    final ZipEntry ze = new ZipEntry(name);
    ze.setMethod(addAsStored ? ZipEntry.STORED : ZipEntry.DEFLATED);
    if (addAsStored) {
        ze.setSize(file.length());
        ze.setCrc(FileUtils.checksumCRC32(file));
    }
    zos.putNextEntry(ze);
    final InputStream is = FileUtils.openInputStream(file);
    IOUtils.copy(is, zos);
    zos.closeEntry();
    zos.flush();
    IOUtils.closeQuietly(is);
}
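
In the example above the stream is closed with IOUtils.closeQuietly, which is skipped if IOUtils.copy throws. A hedged variant of the same method (a sketch, not part of the caArray source) closes the stream with try-with-resources instead:

public void writeZipEntry(ZipOutputStream zos, File file, String name, boolean addAsStored) throws IOException {
    final ZipEntry ze = new ZipEntry(name);
    ze.setMethod(addAsStored ? ZipEntry.STORED : ZipEntry.DEFLATED);
    if (addAsStored) {
        // STORED entries must declare their size and CRC before being written.
        ze.setSize(file.length());
        ze.setCrc(FileUtils.checksumCRC32(file));
    }
    zos.putNextEntry(ze);
    // try-with-resources closes the file stream even if the copy fails.
    try (InputStream is = FileUtils.openInputStream(file)) {
        IOUtils.copy(is, zos);
    }
    zos.closeEntry();
    zos.flush();
}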

From source file:architecture.ee.jdbc.sqlquery.SqlQueryHelper.java

public SqlQueryHelper lob(File file) throws IOException {
    InputStream input = FileUtils.openInputStream(file);
    long size = file.length();
    return lob(input, (int) size);
}

From source file:com.dv.util.DataViewerZipUtil.java

/**
 * Unzips the given zip file into the given destination.
 *
 * @param zipFile the zip archive to extract
 * @param destFile the destination to extract into
 */
public static void unzipFile(File zipFile, File destFile) throws IOException {
    unzipFile(FileUtils.openInputStream(zipFile), destFile);
    //      doUnzipFile(new ZipFile(zipFile), destFile);
}

From source file:de.iteratec.iteraplan.businesslogic.exchange.common.informationflow.VisioInformationFlowTemplateParser.java

private Document parseXmlFile() {
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();

    try {
        DocumentBuilder db = dbf.newDocumentBuilder();
        FileInputStream fis = FileUtils.openInputStream(templateFile);
        return db.parse(fis);
    } catch (SAXException se) {
        LOGGER.error("Error during parsing of template '" + templateFile.getName() + "'.", se);
        throw new IteraplanTechnicalException(IteraplanErrorMessages.INTERNAL_ERROR);
    } catch (ParserConfigurationException pce) {
        LOGGER.error("Error during parser creation.", pce);
        throw new IteraplanTechnicalException(IteraplanErrorMessages.INTERNAL_ERROR);
    } catch (IOException ie) {
        LOGGER.error("I/O Error during parsing of template '" + templateFile.getName() + "'.", ie);
        throw new IteraplanTechnicalException(IteraplanErrorMessages.INTERNAL_ERROR);
    }
}
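
The FileInputStream obtained above is never closed. A minimal sketch of the same try block (assuming the same templateFile field and the surrounding catch clauses) releases the handle with try-with-resources:

        DocumentBuilder db = dbf.newDocumentBuilder();
        // Closing the stream via try-with-resources releases the file handle even if parsing fails.
        try (FileInputStream fis = FileUtils.openInputStream(templateFile)) {
            return db.parse(fis);
        }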