Example usage for org.apache.commons.io FileUtils openOutputStream

List of usage examples for org.apache.commons.io FileUtils openOutputStream

Introduction

On this page you can find example usages of org.apache.commons.io FileUtils.openOutputStream.

Prototype

public static FileOutputStream openOutputStream(File file) throws IOException 

Document

Opens a FileOutputStream for the specified file, checking and creating the parent directory if it does not exist.
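For orientation, here is a minimal sketch of the typical call pattern (the file path and content are hypothetical). The returned stream is wrapped in try-with-resources so it is always closed; openOutputStream creates any missing parent directories before opening the file:

import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

import org.apache.commons.io.FileUtils;

public class OpenOutputStreamExample {
    public static void main(String[] args) throws IOException {
        // Hypothetical target path; the "reports" directory may not exist yet.
        File target = new File("reports/output.txt");
        // openOutputStream creates missing parent directories, then opens the stream.
        try (OutputStream out = FileUtils.openOutputStream(target)) {
            out.write("hello".getBytes(StandardCharsets.UTF_8));
        }
    }
}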

Usage

From source file:net.sourceforge.tess4j.util.LoadLibs.java

/**
 * Copies resources from the jar file of the current thread and extracts them
 * to the destination path.
 *
 * @param jarConnection connection to the jar resource to copy
 * @param destPath destination file or directory
 */
static void copyJarResourceToPath(JarURLConnection jarConnection, File destPath) {
    try {
        JarFile jarFile = jarConnection.getJarFile();
        String jarConnectionEntryName = jarConnection.getEntryName();

        /**
         * Iterate all entries in the jar file.
         */
        for (Enumeration<JarEntry> e = jarFile.entries(); e.hasMoreElements();) {
            JarEntry jarEntry = e.nextElement();
            String jarEntryName = jarEntry.getName();

            /**
             * Extract files only if they match the path.
             */
            if (jarEntryName.startsWith(jarConnectionEntryName)) {
                String filename = jarEntryName.substring(jarConnectionEntryName.length());
                File currentFile = new File(destPath, filename);

                if (jarEntry.isDirectory()) {
                    currentFile.mkdirs();
                } else {
                    currentFile.deleteOnExit();
                    InputStream is = jarFile.getInputStream(jarEntry);
                    OutputStream out = FileUtils.openOutputStream(currentFile);
                    IOUtils.copy(is, out);
                    is.close();
                    out.close();
                }
            }
        }
    } catch (IOException e) {
        logger.log(Level.SEVERE, e.getMessage(), e);
    }
}

From source file:net.tomp2p.simgrid.SimGridTomP2P.java

private static void loadLib(String name) throws IOException {
    String pathJar = null;
    String pathEclipse = null;
    if (OSTester.is64bit() && OSTester.isUnix()) {
        name = "lib" + name + ".so";
        //jar version
        pathJar = "libs" + File.separator + "x64" + File.separator + name;
        //eclipse workspace version
        pathEclipse = File.separator + "libs" + File.separator + "x64" + File.separator + name;
    }
    if (pathJar == null || pathEclipse == null) {
        throw new IOException("Platform not supported");
    }
    InputStream in = SimGridTomP2P.class.getResourceAsStream(pathJar);
    if (in == null) {
        in = SimGridTomP2P.class.getResourceAsStream(pathEclipse);
    }
    File fileOut = new File(System.getProperty("java.io.tmpdir") + "/" + name);
    fileOut.deleteOnExit();
    OutputStream out = FileUtils.openOutputStream(fileOut);
    IOUtils.copy(in, out);
    in.close();
    out.close();
    System.load(fileOut.toString());
}

From source file:net.yoomai.virgo.spider.Emulator.java

/**
 * Fetches the page at the given URL and saves it to a local HTML file.
 *
 * @param id identifier of the spider configuration to use
 * @param filename base name of the output file
 * @param url page URL to fetch
 * @param cookies cookie header value sent with the request
 */
public void getPage(String id, String filename, String url, String cookies) {
    HttpGet httpGet = new HttpGet(url);
    // cookie
    httpGet.setHeader("Cookie", cookies);
    // Content-Type
    String contentType = config.getString("spider" + id + ".content_type");
    if (contentType == null || "".equals(contentType.trim())) {
        contentType = "text/html;charset=gb2312";
    }
    httpGet.setHeader("Content-Type", contentType);
    // User-Agent
    String agent = config.getString("spider" + id + ".agent");
    if (agent != null && !"".equals(agent.trim())) {
        httpGet.setHeader("User-Agent", agent);
    }

    CloseableHttpClient httpClient = HttpClients.createDefault();
    CloseableHttpResponse response = null;
    try {
        response = httpClient.execute(httpGet);
    } catch (IOException e) {
        log.error(e.getMessage() + " : " + e.getCause());
        response = null;
    }

    if (response != null) {
        StatusLine statusLine = response.getStatusLine();
        if (statusLine.getStatusCode() == 200) {
            try {
                InputStream is = response.getEntity().getContent();
                // OutputStream outputStream = System.out;
                SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");
                // open the output file, creating parent directories if needed
                OutputStream outputStream = FileUtils
                        .openOutputStream(new File(config.getString("spider" + id + ".store") + filename + "-"
                                + sdf.format(new Date()) + ".html"));

                byte[] bytes = new byte[1024];
                int count = 0;
                while ((count = is.read(bytes)) != -1) {
                    outputStream.write(bytes, 0, count);
                }
            } catch (IOException e) {
                log.error("??: " + e.getMessage() + " : " + e.getCause());
            }
        } else {
            log.info("????: " + statusLine.getStatusCode());
        }
    } else {
        log.error("GetPage???????.");
    }
}

From source file:nl.knaw.huygens.alexandria.text.FileSystemTextService.java

@Override
public void setFromStream(UUID resourceUUID, InputStream inputStream) {
    File textFile = textFile(resourceUUID);
    try {
        FileOutputStream outputStream = FileUtils.openOutputStream(textFile);
        IOUtils.copy(inputStream, outputStream);
        inputStream.close();
        outputStream.close();

    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}

From source file:omm.OMMEnhancer.java

public void computeEnhancements(ContentItem ci) throws EngineException {
    try {
        //get the (generated or submitted) text version of the ContentItem
        Blob textBlob = ContentItemHelper.getBlob(ci, Collections.singleton("text/plain")).getValue();
        String content = ContentItemHelper.getText(textBlob);

        // write content to a temporary file
        File tempInput = File.createTempFile("input", null);
        IOUtils.write(content, FileUtils.openOutputStream(tempInput));

        // run gate wrapper and map output to triples
        try {
            Document document = Factory.newDocument(tempInput.toURI().toURL());
            ommWrapper.execute(document);
            VariOMapping.mapAnnotations(ci, this, document.getAnnotations());
        } catch (ResourceInstantiationException e) {
            log.error("Unable to initialize gate resource", e);
            throw new EngineException("Unable to initialize gate resource", e);
        } catch (GateException e) {
            log.error("Gate exception", e);
            throw new EngineException("Gate exception", e);
        } finally {
            FileUtils.deleteQuietly(tempInput);
        }
    } catch (IOException ex) {
        log.error("Exception reading content item.", ex);
        throw new InvalidContentException("Exception reading content item.", ex);
    }
}

From source file:org.ado.biblio.desktop.AppConfiguration.java

private static void store() {
    try {
        config.store(FileUtils.openOutputStream(CONFIG), "Biblio Configuration");
    } catch (IOException e) {
        throw new IllegalStateException("Cannot save application configuration file", e);
    }
}

From source file:org.ado.musicdroid.common.AppConfiguration.java

private static void store() {
    try {
        config.store(FileUtils.openOutputStream(CONFIG), "Music Droid Configuration");
    } catch (IOException e) {
        throw new IllegalStateException("Cannot save application configuration file", e);
    }
}

From source file:org.ala.apps.RkColumnFamilyExporter.java

private void exportRanks() throws Exception {
    FileOutputStream csvOut = FileUtils
            .openOutputStream(new File("/data/bie/rk_" + System.currentTimeMillis() + ".csv"));

    long start = System.currentTimeMillis();
    int i = 0;
    int j = 0;
    logger.debug("export Ranks...");
    Scanner scanner = storeHelper.getScanner(RK_COLUMN_FAMILY, RK_COLUMN_FAMILY, "", ColumnType.columnsToIndex);
    byte[] guidAsBytes = null;
    while ((guidAsBytes = scanner.getNextGuid()) != null) {
        String guid = new String(guidAsBytes);
        String taxonGuid = guid.substring(0, guid.indexOf("|"));
        String sciName = "";
        try {
            ExtendedTaxonConceptDTO etc = taxonConceptDao.getExtendedTaxonConceptByGuid(taxonGuid, false);
            if (etc != null && etc.getTaxonConcept() != null) {
                sciName = etc.getTaxonConcept().getNameString();
            }
        } catch (Exception e) {
            logger.error("etc.getTaxonConcept(): " + e);
        }

        i++;
        try {
            //get taxon concept details
            List<String> list = storeHelper.getSuperColumnsByGuid(guid, RK_COLUMN_FAMILY);
            for (String superColumnName : list) {
                try {
                    Map<String, List<Comparable>> columnList = storeHelper.getColumnList(RK_COLUMN_FAMILY, guid,
                            Map.class);
                    Set<String> keys = columnList.keySet();
                    Iterator<String> itr = keys.iterator();
                    while (itr.hasNext()) {
                        j++;
                        String key = itr.next();
                        List rankingList = columnList.get(key);
                        String type = guid.substring(guid.indexOf("|") + 1, guid.length());
                        csvOut.write((guid + "; " + sciName + "; " + type + "; " + key + "; "
                                + mapper.writeValueAsString(rankingList) + "\n").getBytes());
                        logger.debug("Indexed records: " + j + ", current guid: " + guid);
                    }
                } catch (Exception ex) {
                    logger.error("***** guid: " + guid + " ," + ex);
                }
            }
        } catch (Exception ex) {
            logger.error("***** guid: " + guid + " ," + ex);
        }
    }

    csvOut.flush();
    csvOut.close();

    long finish = System.currentTimeMillis();
    logger.info("Index created in: " + ((finish - start) / 1000) + " seconds with  species: " + i
            + ", column items: " + j);
    logger.debug("reload Ranks finished...");
}

From source file:org.ala.hbase.RepoDataLoader.java

/**
 * Scan through the repository, retrieve triples and
 * add to taxon concepts
 *
 * @param filePath Root directory of harvested repository
 * @param repoDirs Optional array of Infosource directories to scan passed as program arguments
 * @throws Exception
 */
public int load(String filePath, String[] repoDirs, boolean allowStats) throws Exception {
    guidList = new ArrayList<String>();
    String lsidFileName = "/data/bie/repoLoader_guid_" + System.currentTimeMillis() + ".csv";

    FileOutputStream statsOut = null;

    logger.info("Scanning directory: " + filePath);

    //open the statistics file
    if (allowStats) {
        statsOut = FileUtils.openOutputStream(
                new File("/data/bie/bie_name_matching_stats_" + System.currentTimeMillis() + ".csv"));
        statsOut.write(
                "InfoSource ID, InfoSource Name, URL, ANBG matches, Other matches, Missing, Homonyms detected\n"
                        .getBytes());
    }

    if (gList || reindex) {
        guidOut = FileUtils.openOutputStream(new File(lsidFileName));
    }

    // reset counts
    totalFilesRead = 0;
    totalPropertiesSynced = 0;

    //start scan
    File file = new File(filePath);
    File[] dirs = null;

    // See if array of infosource directories passed as program arguments
    if (repoDirs.length > 0) {
        dirs = new File[repoDirs.length];
        for (int i = 0; i < repoDirs.length; i++) {
            dirs[i] = new File(file.getAbsolutePath() + File.separator + repoDirs[i]);
            logger.info("Processing directories..." + dirs[i].getAbsolutePath());
        }
    } else {
        //list immediate directories - this will give the infosource directories
        logger.info("Listing all directories...");
        dirs = file.listFiles();
    }

    //go through each infosource directory
    for (File childFile : dirs) {
        logger.info("Listing directories for infosource directory: " + childFile.getAbsolutePath());

        if (childFile.isDirectory()) {
            taxonConceptDao.resetStats();
            //  takes us to /data/bie/<infosource-id>/<section-id>
            logger.info("Listing directories for the section: " + childFile.getAbsolutePath());
            File[] infosourceSection = childFile.listFiles();
            for (File sectionDirectory : infosourceSection) {
                //this will list all the files in the section directory
                if (sectionDirectory.isDirectory()) {
                    File[] dirsToScan = sectionDirectory.listFiles((FileFilter) DirectoryFileFilter.DIRECTORY);
                    scanDirectory(dirsToScan);
                }
            }
            if (allowStats) {
                //report the stats
                if (org.apache.commons.lang.StringUtils.isNumeric(childFile.getName())) {
                    InfoSource infoSource = infoSourceMap.get(new Integer(childFile.getName()));
                    taxonConceptDao.reportStats(statsOut,
                            infoSource.getId() + "," + infoSource.getName() + "," + infoSource.getWebsiteUrl());
                }
            }

        }

    }

    logger.info("Files read: " + totalFilesRead + ", files matched: " + totalPropertiesSynced);
    if (allowStats) {
        statsOut.flush();
        statsOut.close();
    }
    if (gList) {
        guidOut.flush();
        guidOut.close();
    }
    if (reindex) {

        if (!gList) {
            //only want to include unique lsids
            Set<String> guids = new java.util.HashSet<String>(guidList);
            for (String guid : guids)
                guidOut.write((guid + "\n").getBytes());
            guidOut.flush();
            guidOut.close();
        }

        //NC 2013-045-30: use the Partial Index to automatically reindex the values in the file. This batches them into manageable chunks          
        indexer.process(lsidFileName);

        //            //This results in SOLR file locking problems.
        //            //solrUtils.getSolrServer().commit();
        //
        //            // need to call http://bie.ala.org.au/ws/admin/reindex with a JSON array of GUIDS to reindex
        //            logger.debug("Calling bie service to reindex " + guidList.size());
        //            HttpClient httpClient = new HttpClient();
        //            PostMethod post = new PostMethod(reindexUrl);
        //            ObjectMapper mapper = new ObjectMapper();            
        //            
        //
        ////            StringBuilder jsonBuilder = new StringBuilder();
        ////            jsonBuilder.append("[");
        ////            for (int i = 0; i < guidList.size(); i++) {
        ////                jsonBuilder.append("\"" + guidList.get(i) + "\"");
        ////
        ////                if (i < guidList.size() - 1) {
        ////                    jsonBuilder.append(",");
        ////                }
        ////            }
        ////            jsonBuilder.append("]");
        //
        //            post.setRequestHeader("Content-Type", "application/json");
        //            post.setRequestBody(mapper.writeValueAsString(guidList));
        //
        //            try {
        //                int returnCode = httpClient.executeMethod(post);
        //                if (returnCode != 200) {
        //                    logger.error("Error submitting reindex request: " + post.getResponseBodyAsString());
        //                }
        //            } catch (Exception ex) {
        //                logger.error("Error submitting reindex request", ex);
        //                logger.info(guidList);
        //            }

    }

    return totalFilesRead;
}

From source file:org.alfresco.solr.content.SolrFileContentWriter.java

@Override
public synchronized OutputStream getContentOutputStream() throws ContentIOException {
    if (written == true) {
        throw new IllegalStateException("The writer has already been used: " + file);
    } else if (file.exists()) {
        throw new IllegalStateException("The file already exists: " + file);
    }
    try {
        OutputStream is = new BufferedOutputStream(FileUtils.openOutputStream(file));
        written = true;
        // done
        return is;
    } catch (Throwable e) {
        throw new ContentIOException("Failed to open stream onto file: " + file, e);
    }
}