Example usage for com.google.common.io.OutputSupplier

Introduction

This page collects usage examples of com.google.common.io.OutputSupplier drawn from open-source projects.

Prototype

public interface OutputSupplier<T> {
    T getOutput() throws IOException;
}

OutputSupplier was deprecated in favor of ByteSink and CharSink and has since been removed from Guava, so the examples below target older Guava versions.
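Before the project examples, here is a minimal self-contained sketch of the idiom they all share: an anonymous OutputSupplier that opens its stream lazily, so a Guava utility such as ByteStreams.write can open, use, and close the stream in one call. This assumes an older Guava version that still ships the supplier-based APIs; the file name is purely illustrative.

import com.google.common.io.ByteStreams;
import com.google.common.io.OutputSupplier;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;

public class OutputSupplierSketch {
    public static void main(String[] args) throws IOException {
        final File target = new File("example.bin"); // illustrative path
        // The supplier defers opening the stream until ByteStreams.write
        // asks for it; the utility closes the stream when it is done.
        ByteStreams.write(new byte[] { 1, 2, 3 }, new OutputSupplier<OutputStream>() {
            @Override
            public OutputStream getOutput() throws IOException {
                return new FileOutputStream(target);
            }
        });
    }
}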

Usage

From source file: com.xebialabs.overthere.itest.ItestsBase1Utils.java
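A test helper that writes a buffer of random bytes to a file through an anonymous OutputSupplier; the unqualified write(...) call is presumably a statically imported ByteStreams.write: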

protected static byte[] writeRandomBytes(final File f, final int size) throws IOException {
    byte[] randomBytes = generateRandomBytes(size);
    write(randomBytes, new OutputSupplier<OutputStream>() {
        @Override
        public OutputStream getOutput() throws IOException {
            return new FileOutputStream(f);
        }
    });
    return randomBytes;
}

From source file: com.xebialabs.overthere.util.OverthereFileCopier.java

/**
 * Copies a regular file.
 * 
 * @param srcFile
 *            the source file. Must exist and must not be a directory.
 * @param dstFile
 *            the destination file. May exist but must not be a directory. Its parent directory must exist.
 * @throws com.xebialabs.deployit.exception.RuntimeIOException
 *             if an I/O error occurred
 */
private static void copyFile(final OverthereFile srcFile, final OverthereFile dstFile)
        throws RuntimeIOException {
    checkFileExists(srcFile, SOURCE);
    checkReallyIsAFile(dstFile, DESTINATION);

    if (logger.isDebugEnabled()) {
        if (dstFile.exists())
            logger.debug("About to overwrite existing file " + dstFile);
        logger.debug("Copying file " + srcFile + " to " + dstFile);
    }

    try {
        ByteStreams.copy(new InputSupplier<InputStream>() {
            public InputStream getInput() throws IOException {
                return srcFile.getInputStream();
            }
        }, new OutputSupplier<OutputStream>() {
            public OutputStream getOutput() throws IOException {
                return dstFile.getOutputStream();
            }
        });
    } catch (IOException exc) {
        throw new RuntimeIOException("Cannot copy " + srcFile + " to " + dstFile, exc);
    }
}

From source file: eu.numberfour.n4js.tests.projectModel.EclipseBasedProjectModelSetup.java
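Here an OutputSupplier hands an already-open ZipOutputStream to CharStreams.write, which emits the manifest entry and closes the in-memory archive; the zipped bytes are then stored as an Eclipse workspace file: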

private void createArchive(String projectName) throws CoreException, IOException {
    IProject project = workspace.getProject(projectName);
    IFolder libFolder = project.getFolder(LIB_FOLDER_NAME);
    libFolder.create(false, true, null);

    IFile archiveFile = libFolder.getFile(host.archiveProjectName + ".nfar");
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    final ZipOutputStream zipOutputStream = new ZipOutputStream(byteArrayOutputStream);
    zipOutputStream.putNextEntry(new ZipEntry("src/A.js"));
    zipOutputStream.putNextEntry(new ZipEntry("src/B.js"));
    zipOutputStream.putNextEntry(new ZipEntry("src/sub/B.js"));
    zipOutputStream.putNextEntry(new ZipEntry("src/sub/C.js"));
    zipOutputStream.putNextEntry(new ZipEntry("src/sub/leaf/D.js"));

    zipOutputStream.putNextEntry(new ZipEntry(IN4JSProject.N4MF_MANIFEST));
    // CharStreams.write below closes the stream, finalizing the archive
    CharStreams.write("ArtifactId: " + host.archiveProjectName + "\n" + "VendorId: eu.numberfour\n"
            + "ProjectName: \"N4JS Tests\"\n" + "VendorName: \"NumberFour AG\"\n" + "ProjectType: lib\n"
            + "ProjectVersion: 0.0.1-SNAPSHOT\n" + "Libraries { \"" + LIB_FOLDER_NAME + "\"\n }\n"
            + "Output: \"src-gen\"" + "Sources {\n" + "   source { " + "      \"src\"\n" + "   }\n" + "}\n",
            CharStreams.newWriterSupplier(new OutputSupplier<ZipOutputStream>() {
                @Override
                public ZipOutputStream getOutput() throws IOException {
                    return zipOutputStream;
                }
            }, Charsets.UTF_8));

    archiveFile.create(new ByteArrayInputStream(byteArrayOutputStream.toByteArray()), false, null);

    host.setArchiveFileURI(URI.createPlatformResourceURI(archiveFile.getFullPath().toString(), true));
}

From source file: eu.numberfour.n4js.tests.projectModel.FileBasedProjectModelSetup.java
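A file-based variant of the previous example: the same OutputSupplier idiom writes the manifest into a zip archive on disk: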

private void createArchive(URI baseDir) throws IOException {
    File directory = new File(java.net.URI.create(baseDir.toString()));
    File lib = new File(directory, "lib");
    assertTrue(lib.mkdir());
    File nfar = new File(lib, host.archiveProjectName + ".nfar");
    final ZipOutputStream zipOutputStream = new ZipOutputStream(new FileOutputStream(nfar));
    zipOutputStream.putNextEntry(new ZipEntry("src/A.js"));
    zipOutputStream.putNextEntry(new ZipEntry("src/B.js"));
    zipOutputStream.putNextEntry(new ZipEntry("src/sub/B.js"));
    zipOutputStream.putNextEntry(new ZipEntry("src/sub/C.js"));
    zipOutputStream.putNextEntry(new ZipEntry("src/sub/leaf/D.js"));

    zipOutputStream.putNextEntry(new ZipEntry(IN4JSProject.N4MF_MANIFEST));
    // CharStreams.write below closes the stream, finalizing the archive
    CharStreams.write(
            "ArtifactId: " + host.archiveProjectName + "\n" + "VendorId: eu.numberfour\n"
                    + "ProjectName: \"N4JS Tests\"\n" + "VendorName: \"NumberFour AG\"\n" + "ProjectType: lib\n"
                    + "ProjectVersion: 0.0.1-SNAPSHOT\n" + "Output: \"src-gen\"\n" + "Sources {\n"
                    + "   source {" + "      \"src\"\n" + "   }\n" + "}",
            CharStreams.newWriterSupplier(new OutputSupplier<ZipOutputStream>() {
                @Override
                public ZipOutputStream getOutput() throws IOException {
                    return zipOutputStream;
                }
            }, Charsets.UTF_8));
    host.setArchiveFileURI(URI.createURI(nfar.toURI().toString()));
}

From source file: io.druid.segment.StringDimensionMergerLegacy.java
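This Druid merger adapts a Guava ByteSink to an OutputSupplier so that the inverted index can be written through the same SerializerUtils and ByteStreams calls used for the spatial index: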

@Override
public void writeIndexesToFiles(final ByteSink invertedIndexFile,
        final OutputSupplier<FileOutputStream> spatialIndexFile) throws IOException {
    final SerializerUtils serializerUtils = new SerializerUtils();
    final OutputSupplier<OutputStream> invertedIndexOutputSupplier = new OutputSupplier<OutputStream>() {
        @Override
        public OutputStream getOutput() throws IOException {
            return invertedIndexFile.openStream();
        }
    };

    bitmapWriter.close();
    serializerUtils.writeString(invertedIndexOutputSupplier, dimensionName);
    ByteStreams.copy(bitmapWriter.combineStreams(), invertedIndexOutputSupplier);

    if (capabilities.hasSpatialIndexes()) {
        spatialWriter.close();
        serializerUtils.writeString(spatialIndexFile, dimensionName);
        ByteStreams.copy(spatialWriter.combineStreams(), spatialIndexFile);
        spatialIoPeon.cleanup();
    }
}

From source file: co.cask.cdap.app.runtime.spark.SparkRuntimeUtils.java

/**
 * Creates a zip file which contains a serialized {@link Properties} with a given zip entry name, together with
 * all files under the given directory. This is called from Client.createConfArchive() as a workaround for the
 * SPARK-13441 bug.
 *
 * @param sparkConf the {@link SparkConf} to save
 * @param propertiesEntryName name of the zip entry for the properties
 * @param confDirPath directory to scan for files to include in the zip file
 * @param outputZipPath output file
 * @return the zip file
 */
public static File createConfArchive(SparkConf sparkConf, final String propertiesEntryName, String confDirPath,
        String outputZipPath) {
    final Properties properties = new Properties();
    for (Tuple2<String, String> tuple : sparkConf.getAll()) {
        properties.put(tuple._1(), tuple._2());
    }

    try {
        File confDir = new File(confDirPath);
        final File zipFile = new File(outputZipPath);
        BundleJarUtil.createArchive(confDir, new OutputSupplier<ZipOutputStream>() {
            @Override
            public ZipOutputStream getOutput() throws IOException {
                ZipOutputStream zipOutput = new ZipOutputStream(new FileOutputStream(zipFile));
                zipOutput.putNextEntry(new ZipEntry(propertiesEntryName));
                properties.store(zipOutput, "Spark configuration.");
                zipOutput.closeEntry();

                return zipOutput;
            }
        });
        LOG.debug("Spark config archive created at {} from {}", zipFile, confDir);
        return zipFile;
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}

From source file: co.cask.cdap.data2.util.hbase.CoprocessorManager.java

/**
 * Get the location of the coprocessor and ensure it exists, optionally overwriting it if it exists.
 * In distributed mode, the coprocessor jar is loaded onto hdfs by the CoprocessorBuildTool,
 * but in other modes it is still useful to create the jar on demand.
 *
 * @param overwrite whether to overwrite the coprocessor if it already exists
 * @return the location of the coprocessor
 * @throws IOException if there was an issue accessing the location
 */
public synchronized Location ensureCoprocessorExists(boolean overwrite) throws IOException {

    final Location targetPath = jarDir.append(getCoprocessorName());
    if (!overwrite && targetPath.exists()) {
        return targetPath;
    }

    // ensure the jar directory exists
    Locations.mkdirsIfNotExists(jarDir);

    StringBuilder buf = new StringBuilder();
    for (Class<? extends Coprocessor> c : coprocessors) {
        buf.append(c.getName()).append(", ");
    }

    LOG.debug("Creating jar file for coprocessor classes: {}", buf.toString());

    final Map<String, URL> dependentClasses = new HashMap<>();
    for (Class<? extends Coprocessor> clz : coprocessors) {
        Dependencies.findClassDependencies(clz.getClassLoader(), new ClassAcceptor() {
            @Override
            public boolean accept(String className, final URL classUrl, URL classPathUrl) {
                // Assuming the endpoint and protocol classes don't have dependencies
                // other than those that come with HBase, Java, fastutil, and Gson
                if (className.startsWith("co.cask") || className.startsWith("it.unimi.dsi.fastutil")
                        || className.startsWith("org.apache.tephra")
                        || className.startsWith("com.google.gson")) {
                    if (!dependentClasses.containsKey(className)) {
                        dependentClasses.put(className, classUrl);
                    }
                    return true;
                }
                return false;
            }
        }, clz.getName());
    }

    if (dependentClasses.isEmpty()) {
        return null;
    }

    // create the coprocessor jar on local filesystem
    LOG.debug("Adding " + dependentClasses.size() + " classes to jar");
    File jarFile = File.createTempFile("coprocessor", ".jar");
    byte[] buffer = new byte[4 * 1024];
    try (JarOutputStream jarOutput = new JarOutputStream(new FileOutputStream(jarFile))) {
        for (Map.Entry<String, URL> entry : dependentClasses.entrySet()) {
            jarOutput.putNextEntry(new JarEntry(entry.getKey().replace('.', File.separatorChar) + ".class"));

            try (InputStream inputStream = entry.getValue().openStream()) {
                int len = inputStream.read(buffer);
                while (len >= 0) {
                    jarOutput.write(buffer, 0, len);
                    len = inputStream.read(buffer);
                }
            }
        }
    } catch (IOException e) {
        LOG.error("Unable to create temporary local coprocessor jar {}.", jarFile.getAbsolutePath(), e);
        if (!jarFile.delete()) {
            LOG.warn("Unable to clean up temporary local coprocessor jar {}.", jarFile.getAbsolutePath());
        }
        throw e;
    }

    // copy the local jar file to the filesystem (HDFS)
    // copies to a tmp location, then renames the tmp location to the target location, in case
    // multiple CoprocessorManagers were called at the same time. This should never be the case in distributed
    // mode, as coprocessors should all be loaded beforehand using the CoprocessorBuildTool.
    final Location tmpLocation = jarDir.getTempFile(".jar");
    try {
        // Copy jar file into filesystem (HDFS)
        Files.copy(jarFile, new OutputSupplier<OutputStream>() {
            @Override
            public OutputStream getOutput() throws IOException {
                return tmpLocation.getOutputStream();
            }
        });
    } catch (IOException e) {
        LOG.error("Unable to copy local coprocessor jar to filesystem at {}.", tmpLocation, e);
        if (tmpLocation.exists()) {
            LOG.info("Deleting partially copied coprocessor jar at {}.", tmpLocation);
            try {
                if (!tmpLocation.delete()) {
                    LOG.error("Unable to delete partially copied coprocessor jar at {}.", tmpLocation, e);
                }
            } catch (IOException e1) {
                LOG.error("Unable to delete partially copied coprocessor jar at {}.", tmpLocation, e1);
                e.addSuppressed(e1);
            }
        }
        throw e;
    } finally {
        if (!jarFile.delete()) {
            LOG.warn("Unable to clean up temporary local coprocessor jar {}.", jarFile.getAbsolutePath());
        }
    }

    tmpLocation.renameTo(targetPath);
    return targetPath;
}

From source file: co.cask.cdap.common.io.Locations.java

/**
 * Creates a new {@link OutputSupplier} that provides an {@link OutputStream} for the given location.
 *
 * @param location Location for the output.
 * @return An {@link OutputSupplier}.
 */
public static OutputSupplier<? extends OutputStream> newOutputSupplier(final Location location) {
    return new OutputSupplier<OutputStream>() {
        @Override
        public OutputStream getOutput() throws IOException {
            return location.getOutputStream();
        }
    };
}
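As a usage sketch (hypothetical call site; location and payload are illustrative, and this assumes a Guava version that still has the supplier-based ByteStreams.write):

// Write a byte payload to the location in one step; the supplier opens the
// stream lazily and ByteStreams.write closes it when finished.
ByteStreams.write(payload, Locations.newOutputSupplier(location));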

From source file: net.sourceforge.ganttproject.document.webdav.MiltonResourceImpl.java
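This WebDAV resource first buffers the payload in a temporary file via ByteStreams.write and an OutputSupplier, then uploads that file to the parent folder, translating Milton's exceptions into WebDavException: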

@Override
public void write(byte[] byteArray) throws WebDavException {
    MiltonResourceImpl parent = (MiltonResourceImpl) getParent();
    if (!parent.exists()) {
        throw new WebDavException(MessageFormat.format("Folder {0} does not exist", parent.getName()));
    }
    assert parent.myImpl instanceof Folder;
    Folder parentFolder = (Folder) parent.myImpl;
    try {
        final java.io.File tempFile = java.io.File.createTempFile("webdav-" + myUrl.hostUrl, "");
        ByteStreams.write(byteArray, new OutputSupplier<OutputStream>() {
            @Override
            public OutputStream getOutput() throws IOException {
                return new BufferedOutputStream(new FileOutputStream(tempFile));
            }
        });
        parentFolder.uploadFile(getName(), tempFile, null);
    } catch (NotAuthorizedException e) {
        throw new WebDavException(MessageFormat.format("User {0} is probably not authorized to access {1}",
                getUsername(), myUrl.hostName), e);
    } catch (BadRequestException e) {
        throw new WebDavException(MessageFormat.format("Bad request when accessing {0}", myUrl.hostName), e);
    } catch (HttpException e) {
        throw new WebDavException(MessageFormat.format("HTTP problems when accessing {0}", myUrl.hostName), e);
    } catch (ConflictException e) {
        throw new WebDavException(MessageFormat.format("Conflict when accessing {0}", myUrl.hostName), e);
    } catch (NotFoundException e) {
        throw new WebDavException(
                MessageFormat.format("Resource {0} is not found on {1}", myUrl.path, myUrl.hostName), e);
    } catch (FileNotFoundException e) {
        throw new WebDavException(
                MessageFormat.format("I/O problems when uploading {0} to {1}", myUrl.path, myUrl.hostName), e);
    } catch (IOException e) {
        throw new WebDavException(
                MessageFormat.format("I/O problems when uploading {0} to {1}", myUrl.path, myUrl.hostName), e);
    }
}

From source file: co.cask.tigon.data.util.hbase.HBaseTableUtil.java
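This utility bundles coprocessor classes and their co.cask dependencies into a jar, names the jar after an MD5 hash of its contents, and copies it to the target filesystem with Files.copy and an OutputSupplier: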

public static Location createCoProcessorJar(String filePrefix, Location jarDir,
        Iterable<? extends Class<? extends Coprocessor>> classes) throws IOException {
    StringBuilder buf = new StringBuilder();
    for (Class<? extends Coprocessor> c : classes) {
        buf.append(c.getName()).append(", ");
    }
    if (buf.length() == 0) {
        return null;
    }

    LOG.debug("Creating jar file for coprocessor classes: " + buf.toString());
    final Hasher hasher = Hashing.md5().newHasher();
    final byte[] buffer = new byte[COPY_BUFFER_SIZE];

    final Map<String, URL> dependentClasses = new HashMap<String, URL>();
    for (Class<? extends Coprocessor> clz : classes) {
        Dependencies.findClassDependencies(clz.getClassLoader(), new Dependencies.ClassAcceptor() {
            @Override
            public boolean accept(String className, final URL classUrl, URL classPathUrl) {
                // Assuming the endpoint and protocol classes don't have dependencies
                // other than those that come with HBase and Java.
                if (className.startsWith("co.cask")) {
                    if (!dependentClasses.containsKey(className)) {
                        dependentClasses.put(className, classUrl);
                    }
                    return true;
                }
                return false;
            }
        }, clz.getName());
    }

    if (!dependentClasses.isEmpty()) {
        LOG.debug("Adding " + dependentClasses.size() + " classes to jar");
        File jarFile = File.createTempFile(filePrefix, ".jar");
        try {
            JarOutputStream jarOutput = null;
            try {
                jarOutput = new JarOutputStream(new FileOutputStream(jarFile));
                for (Map.Entry<String, URL> entry : dependentClasses.entrySet()) {
                    try {
                        jarOutput.putNextEntry(
                                new JarEntry(entry.getKey().replace('.', File.separatorChar) + ".class"));
                        InputStream inputStream = entry.getValue().openStream();

                        try {
                            int len = inputStream.read(buffer);
                            while (len >= 0) {
                                hasher.putBytes(buffer, 0, len);
                                jarOutput.write(buffer, 0, len);
                                len = inputStream.read(buffer);
                            }
                        } finally {
                            inputStream.close();
                        }
                    } catch (IOException e) {
                        LOG.info("Error writing to jar", e);
                        throw Throwables.propagate(e);
                    }
                }
            } finally {
                if (jarOutput != null) {
                    jarOutput.close();
                }
            }

            // Copy jar file into HDFS
            // Target path is the jarDir + jarMD5.jar
            final Location targetPath = jarDir.append("coprocessor" + hasher.hash().toString() + ".jar");

            // If the file exists and has the same size, assume the file hasn't changed
            if (targetPath.exists() && targetPath.length() == jarFile.length()) {
                return targetPath;
            }

            // Copy jar file into filesystem
            if (!jarDir.mkdirs() && !jarDir.exists()) {
                throw new IOException("Failed to create directory: " + jarDir.toURI());
            }
            Files.copy(jarFile, new OutputSupplier<OutputStream>() {
                @Override
                public OutputStream getOutput() throws IOException {
                    return targetPath.getOutputStream();
                }
            });
            return targetPath;
        } finally {
            jarFile.delete();
        }
    }
    // no dependent classes to add
    return null;
}