Example usage for com.liferay.portal.kernel.util StreamUtil transfer

List of usage examples for com.liferay.portal.kernel.util StreamUtil transfer

Introduction

In this page you can find the example usage for com.liferay.portal.kernel.util StreamUtil transfer.

Prototype

public static void transfer(InputStream inputStream, OutputStream outputStream, boolean cleanUp)
            throws IOException 

Source Link

Usage

From source file:au.com.permeance.liferay.util.zip.ZipWriter.java

License:Open Source License

/**
 * Adds a new ZIP entry named {@code path} and fills it with the bytes read
 * from {@code is}, recording the path as allocated.
 *
 * @param path the entry name inside the archive
 * @param is the stream supplying the entry's contents
 * @throws IOException if writing the entry fails
 */
public void addEntry(String path, InputStream is) throws IOException {
    ZipEntry zipEntry = new ZipEntry(path);

    zos.putNextEntry(zipEntry);

    // Third argument false: presumably leaves both streams open for the
    // caller to manage — confirm against StreamUtil#transfer
    StreamUtil.transfer(is, this.zos, false);

    allocatedPaths.add(path);
}

From source file:com.liferay.frontend.css.rtl.servlet.internal.RTLServlet.java

License:Open Source License

/**
 * Resolves the URL of the requested CSS resource, substituting a
 * right-to-left (RTL) variant when the request's language is RTL.
 *
 * <p>Resolution order: (1) a hand-authored "_rtl"-suffixed resource in the
 * servlet context, (2) a previously generated RTL file cached in the
 * bundle's data area (used only if newer than the original), (3) a freshly
 * converted RTL stylesheet, which is also written to the cache best-effort.
 *
 * @param request the current request; the "languageId" parameter selects
 *        the language whose direction is checked
 * @return the URL to serve, or <code>null</code> if the original resource
 *         does not exist
 * @throws IOException if reading or converting the resource fails
 */
protected URL getResourceURL(HttpServletRequest request) throws IOException {

    String path = URLDecoder.decode(RequestDispatcherUtil.getEffectivePath(request), StringPool.UTF8);

    URL url = _servletContextHelper.getResource(path);

    if (url == null) {
        return null;
    }

    String languageId = request.getParameter("languageId");

    // NOTE(review): this debug message also fires when languageId is null,
    // in which case "specified language null" is slightly misleading
    if ((languageId == null) || !PortalUtil.isRightToLeft(request)) {
        if (_log.isDebugEnabled()) {
            _log.debug("Skip because specified language " + languageId + " is not right to left");
        }

        return url;
    }

    // Prefer an explicit "_rtl" variant shipped alongside the original
    String rtlPath = FileUtil.appendSuffix(path, "_rtl");

    URL rtlURL = _servletContextHelper.getResource(rtlPath);

    if (rtlURL != null) {
        return rtlURL;
    }

    // Fall back to a cached, generated RTL file if it is still fresh
    File dataFile = _bundle.getDataFile(rtlPath);

    if (dataFile.exists() && (dataFile.lastModified() > url.openConnection().getLastModified())) {

        URI uri = dataFile.toURI();

        return uri.toURL();
    }

    // Generate the RTL stylesheet from the original CSS
    CSSRTLConverter cssRTLConverter = new CSSRTLConverter(false);

    String rtl = cssRTLConverter.process(StringUtil.read(url.openStream()));

    InputStream inputStream = new ByteArrayInputStream(rtl.getBytes(StringPool.UTF8));

    OutputStream outputStream = null;

    try {
        // Best-effort caching: a failure here is logged and the data file
        // (possibly partial) is still what gets returned below
        dataFile.getParentFile().mkdirs();

        dataFile.createNewFile();

        outputStream = new FileOutputStream(dataFile);

        StreamUtil.transfer(inputStream, outputStream, false);
    } catch (IOException ioe) {
        if (_log.isWarnEnabled()) {
            _log.warn("Unable to cache RTL CSS", ioe);
        }
    } finally {
        if (outputStream != null) {
            outputStream.close();
        }
    }

    // NOTE(review): reset() rewinds the ByteArrayInputStream, but the
    // stream is never read again afterwards — this call looks unnecessary
    inputStream.reset();

    URI uri = dataFile.toURI();

    return uri.toURL();
}

From source file:com.liferay.frontend.editor.ckeditor.web.internal.servlet.taglib.CKEditorCreoleOnEditorCreateDynamicInclude.java

License:Open Source License

/**
 * Injects the Creole dialog definition script into the page, and — when the
 * editor's toolbar set is "creole" — the dialog show script as well.
 *
 * @param request the current request; read for the
 *        "liferay-ui:input-editor:toolbarSet" attribute
 * @param response the response the scripts are streamed to
 * @param key the dynamic include key (unused here)
 * @throws IOException if a bundle entry cannot be streamed
 */
@Override
public void include(HttpServletRequest request, HttpServletResponse response, String key) throws IOException {

    Bundle bundle = _bundleContext.getBundle();

    URL entryURL = bundle.getEntry("/META-INF/resources/ckeditor/extension/creole_dialog_definition.js");

    StreamUtil.transfer(entryURL.openStream(), response.getOutputStream(), false);

    String toolbarSet = (String) request.getAttribute("liferay-ui:input-editor:toolbarSet");

    // Constant-first comparison avoids an NPE when the attribute is absent
    if ("creole".equals(toolbarSet)) {
        entryURL = bundle.getEntry("/META-INF/resources/ckeditor/extension/creole_dialog_show.js");

        StreamUtil.transfer(entryURL.openStream(), response.getOutputStream(), false);
    }
}

From source file:com.liferay.frontend.editor.ckeditor.web.internal.servlet.taglib.CKEditorOnEditorCreateDynamicInclude.java

License:Open Source License

/**
 * Streams the CKEditor dialog definition extension script from this
 * bundle into the response.
 *
 * @param request the current request (unused)
 * @param response the response the script is written to
 * @param key the dynamic include key (unused)
 * @throws IOException if the bundle entry cannot be streamed
 */
@Override
public void include(HttpServletRequest request, HttpServletResponse response, String key) throws IOException {

    URL dialogDefinitionURL = _bundleContext.getBundle().getEntry(
            "/META-INF/resources/ckeditor/extension/dialog_definition.js");

    StreamUtil.transfer(dialogDefinitionURL.openStream(), response.getOutputStream(), false);
}

From source file:com.liferay.frontend.js.bundle.config.extender.internal.JSBundleConfigServlet.java

License:Open Source License

/**
 * Serves the aggregated JS bundle configurations as a single JavaScript
 * response. Each registered config is wrapped in a client-side try/catch so
 * one broken config does not prevent the others from running in the browser.
 *
 * @param request the current request (unused)
 * @param response the response the combined script is written to
 * @throws IOException if writing to the response fails
 */
@Override
protected void service(HttpServletRequest request, HttpServletResponse response) throws IOException {

    response.setContentType(ContentTypes.TEXT_JAVASCRIPT_UTF8);

    ServletOutputStream servletOutputStream = response.getOutputStream();

    // Autoflush (second argument true) flushes on each println.
    // NOTE(review): the writer and the raw stream are interleaved below;
    // output ordering relies on that autoflush — confirm it holds.
    PrintWriter printWriter = new PrintWriter(servletOutputStream, true);

    Collection<JSConfig> jsConfigs = _jsBundleConfigTracker.getJSConfigs();

    if (!jsConfigs.isEmpty()) {
        // IIFE keeps the per-config var declarations out of global scope
        printWriter.println("(function() {");

        for (JSConfig jsConfig : jsConfigs) {
            URL url = jsConfig.getURL();

            try (InputStream inputStream = url.openStream()) {
                // Client-side try/catch: an error in one config is logged
                // to the browser console instead of aborting the script
                servletOutputStream.println("try {");

                ServletContext servletContext = jsConfig.getServletContext();

                servletOutputStream.println(StringBundler.concat("var MODULE_PATH = '", _portal.getPathProxy(),
                        servletContext.getContextPath(), "';"));

                StreamUtil.transfer(inputStream, servletOutputStream, false);

                servletOutputStream.println("} catch (error) {");
                servletOutputStream.println("console.error(error);");
                servletOutputStream.println("}");
            } catch (Exception e) {
                // Server-side failure to read the config resource; skip it
                _logger.log(Logger.LOG_ERROR, "Unable to open resource", e);
            }
        }

        printWriter.println("}());");
    }

    printWriter.close();
}

From source file:com.liferay.frontend.js.loader.modules.extender.internal.npm.builtin.BaseBuiltInJSModuleServlet.java

License:Open Source License

/**
 * Serves a built-in JS module (or its source map, for ".map" paths),
 * resolving the module from the request path.
 *
 * <p>Fixes two NPE hazards in the original: {@code getPathInfo()} may
 * return <code>null</code>, and a <code>null</code> module stream made the
 * <code>finally</code> block throw. Try-with-resources skips closing a
 * <code>null</code> resource, and the stream is always closed on exit.
 *
 * @param request the current request; the path selects the module/map
 * @param response the response the module contents are written to
 * @throws IOException if writing the error response fails
 * @throws ServletException declared for the servlet contract
 */
@Override
protected void service(HttpServletRequest request, HttpServletResponse response)
        throws IOException, ServletException {

    JSModule jsModule = _resolveJSModule(request);

    if (jsModule == null) {
        response.sendError(HttpServletResponse.SC_NOT_FOUND);

        return;
    }

    String contentType;
    InputStream inputStream;

    String pathInfo = request.getPathInfo();

    // getPathInfo() may be null; treat that like a plain module request
    if ((pathInfo != null) && pathInfo.endsWith(".map")) {
        contentType = ContentTypes.APPLICATION_JSON;
        inputStream = jsModule.getSourceMapInputStream();
    } else {
        contentType = ContentTypes.TEXT_JAVASCRIPT_UTF8;
        inputStream = jsModule.getInputStream();
    }

    response.setContentType(contentType);

    // Try-with-resources closes the stream on every path and is a no-op if
    // the module returned null (the old finally block would NPE)
    try (InputStream is = inputStream) {
        StreamUtil.transfer(is, response.getOutputStream(), false);
    } catch (Exception e) {
        response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Unable to read file");
    }
}

From source file:com.liferay.hadoop.action.HadoopJob.java

License:Open Source License

/**
 * Deploys (if needed), configures, and submits the word count Hadoop job,
 * streaming a plain-text status report to the response.
 *
 * <p>Fixes a copy-pasted status message: the re-issue branch previously
 * printed "Submitting job the first time". Also renames the catch variable
 * {@code ioe} to {@code e}, since it catches {@link Exception}.
 *
 * @param request the current request (unused)
 * @param response the response the status report is written to
 * @return always <code>null</code>
 * @throws Exception if obtaining the response writer fails
 */
public String doExecute(HttpServletRequest request, HttpServletResponse response) throws Exception {

    response.setContentType(ContentTypes.TEXT_PLAIN_UTF8);

    PrintWriter writer = response.getWriter();

    FileSystem fileSystem = HadoopManager.getFileSystem();

    JobClient jobClient = HadoopManager.getJobClient();

    writer.println("-- Job Status --");

    Path inputPath = new Path("/index/*/*");
    Path outputPath = new Path("/wordcount/results");

    try {
        if (_runningJob == null) {
            writer.println("Creating job");

            // Remove any stale job jar so the latest code gets deployed
            if (fileSystem.exists(_jobPath)) {
                fileSystem.delete(_jobPath, false);
            }

            if (!fileSystem.exists(_jobPath)) {
                writer.println("Deploying the job code to cluster");

                FSDataOutputStream outputStream = null;

                try {
                    outputStream = fileSystem.create(_jobPath);

                    ServletContext servletContext = HadoopManager.getServletContext();

                    InputStream inputStream = servletContext.getResourceAsStream("/WEB-INF/lib/hadoop-job.jar");

                    StreamUtil.transfer(inputStream, outputStream, false);
                } finally {
                    StreamUtil.cleanUp(outputStream);
                }

                writer.println("Job code deployed to cluster");
            }

            if (fileSystem.exists(outputPath)) {
                writer.println("A previous job output was found, backing it up");

                fileSystem.rename(outputPath,
                        outputPath.getParent().suffix("/.results-" + System.currentTimeMillis()));
            }

            _jobConf = HadoopManager.createNewJobConf();

            _jobConf.setJobName("Word Count");

            writer.println("Job '" + _jobConf.getJobName() + "' is being configured");

            _jobConf.setJarByClass(Map.class);
            _jobConf.setOutputKeyClass(Text.class);
            _jobConf.setOutputValueClass(IntWritable.class);
            _jobConf.setMapperClass(Map.class);
            _jobConf.setCombinerClass(Reduce.class);
            _jobConf.setReducerClass(Reduce.class);
            _jobConf.setInputFormat(TextInputFormat.class);
            _jobConf.setOutputFormat(TextOutputFormat.class);

            writer.println("Job code deployed to distributed cache's classpath");

            DistributedCache.addArchiveToClassPath(_jobPath, _jobConf, fileSystem);

            FileInputFormat.setInputPaths(_jobConf, inputPath);
            FileOutputFormat.setOutputPath(_jobConf, outputPath);

            writer.println("Submitting job the first time");

            _runningJob = jobClient.submitJob(_jobConf);

            writer.println("Job submitted");
        }

        int jobState = _runningJob.getJobState();

        writer.println(
                "Job status: " + jobState + " (RUNNING = 1, SUCCEEDED = 2, FAILED = 3, PREP = 4, KILLED = 5)");

        if ((jobState != JobStatus.RUNNING) && (jobState != JobStatus.PREP)) {

            writer.println("Re-issuing the job");

            if (fileSystem.exists(outputPath)) {
                writer.println("A previous job output was found, backing it up");

                fileSystem.rename(outputPath,
                        outputPath.getParent().suffix("/.results-" + System.currentTimeMillis()));
            }

            // Fixed copy-pasted message: this branch resubmits the job
            writer.println("Resubmitting job");

            _runningJob = jobClient.submitJob(_jobConf);

            writer.println("Job submitted");
        }
    } catch (Exception e) {
        writer.println("Job error: ");

        e.printStackTrace(writer);
    }

    writer.flush();
    writer.close();

    return null;
}

From source file:com.liferay.hadoop.store.HDFSStore.java

License:Open Source License

/**
 * Stores a new file in HDFS under the default version path for the given
 * company, repository, and file name.
 *
 * @param companyId the company owning the file
 * @param repositoryId the repository the file belongs to
 * @param fileName the file's name
 * @param is the stream supplying the file's contents
 * @throws SystemException if writing to HDFS fails
 */
@Override
public void addFile(long companyId, long repositoryId, String fileName, InputStream is)
        throws PortalException, SystemException {

    Path versionPath = HadoopManager.getFullVersionFilePath(companyId, repositoryId, fileName, VERSION_DEFAULT);

    FSDataOutputStream hdfsOutputStream = null;

    try {
        hdfsOutputStream = HadoopManager.getFileSystem().create(versionPath);

        StreamUtil.transfer(is, hdfsOutputStream, false);
    } catch (IOException e) {
        // Wrap the I/O failure in the service-layer exception type
        throw new SystemException(e);
    } finally {
        StreamUtil.cleanUp(hdfsOutputStream);
    }
}

From source file:com.liferay.hadoop.store.HDFSStore.java

License:Open Source License

/**
 * Replaces the stored contents of the given file version in HDFS with the
 * bytes read from {@code inputStream}.
 *
 * @param companyId the company owning the file
 * @param repositoryId the repository the file belongs to
 * @param fileName the file's name
 * @param versionLabel the version whose contents are replaced
 * @param inputStream the stream supplying the new contents
 * @throws SystemException if writing to HDFS fails
 */
@Override
public void updateFile(long companyId, long repositoryId, String fileName, String versionLabel,
        InputStream inputStream) throws PortalException, SystemException {

    Path versionPath = HadoopManager.getFullVersionFilePath(companyId, repositoryId, fileName, versionLabel);

    FSDataOutputStream hdfsOutputStream = null;

    try {
        FileSystem hdfs = HadoopManager.getFileSystem();

        hdfsOutputStream = hdfs.create(versionPath);

        StreamUtil.transfer(inputStream, hdfsOutputStream, false);
    } catch (IOException e) {
        // Wrap the I/O failure in the service-layer exception type
        throw new SystemException(e);
    } finally {
        StreamUtil.cleanUp(hdfsOutputStream);
    }
}

From source file:com.liferay.hadoop.util.HadoopManager.java

License:Open Source License

/**
 * Deploys (if needed), configures, and submits the word count job for the
 * store event's root path. No-op when no servlet context has been set.
 *
 * @param storeEvent the event whose root path selects input and output
 * @throws IOException if a filesystem or job client operation fails
 */
public static void runJob(StoreEvent storeEvent) throws IOException {
    FileSystem fileSystem = getFileSystem();

    // Without a servlet context the job jar cannot be located; bail out
    if (_servletContext == null) {
        return;
    }

    JobClient jobClient = getJobClient();

    Path inputPath = new Path("/index".concat(storeEvent.getRootPath().toString()).concat("/*"));
    Path outputPath = new Path("/wordcount".concat(storeEvent.getRootPath().toString()).concat("/results"));

    try {
        if (_runningJob == null) {
            // Deploy the job jar to the cluster once
            if (!fileSystem.exists(_jobPath)) {
                FSDataOutputStream outputStream = null;

                try {
                    outputStream = fileSystem.create(_jobPath);

                    InputStream inputStream = _servletContext
                            .getResourceAsStream("/WEB-INF/lib/hadoop-job.jar");

                    StreamUtil.transfer(inputStream, outputStream, false);
                } finally {
                    StreamUtil.cleanUp(outputStream);
                }
            }

            // Back up any previous results before the job overwrites them
            if (fileSystem.exists(outputPath)) {
                fileSystem.rename(outputPath,
                        outputPath.getParent().suffix("/.results-" + System.currentTimeMillis()));
            }

            _jobConf = new JobConf(_sharedJobConf);

            _jobConf.setJobName("Word Count");
            _jobConf.setJarByClass(Map.class);
            _jobConf.setOutputKeyClass(Text.class);
            _jobConf.setOutputValueClass(IntWritable.class);
            _jobConf.setMapperClass(Map.class);
            _jobConf.setCombinerClass(Reduce.class);
            _jobConf.setReducerClass(Reduce.class);
            _jobConf.setInputFormat(TextInputFormat.class);
            _jobConf.setOutputFormat(TextOutputFormat.class);

            DistributedCache.addArchiveToClassPath(_jobPath, _jobConf, fileSystem);

            FileInputFormat.setInputPaths(_jobConf, inputPath);
            FileOutputFormat.setOutputPath(_jobConf, outputPath);

            _runningJob = jobClient.submitJob(_jobConf);
        }

        int jobState = _runningJob.getJobState();

        // Resubmit when the tracked job has finished (not RUNNING/PREP)
        if ((jobState != JobStatus.RUNNING) && (jobState != JobStatus.PREP)) {

            System.out.println("Re-issuing the word count job.");

            if (fileSystem.exists(outputPath)) {
                fileSystem.rename(outputPath,
                        outputPath.getParent().suffix("/.results-" + System.currentTimeMillis()));
            }

            _runningJob = jobClient.submitJob(_jobConf);
        }
    // NOTE(review): catches Exception yet is named "ioe"; also swallows
    // errors to stdout via printStackTrace — consider a logger instead
    } catch (Exception ioe) {
        ioe.printStackTrace();
    }
}