Example usage for org.apache.commons.io IOUtils copyLarge

Introduction

This page collects usage examples of org.apache.commons.io.IOUtils.copyLarge from open source projects.

Prototype

public static long copyLarge(Reader input, Writer output) throws IOException 

Document

Copy chars from a large (over 2GB) Reader to a Writer.
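
A minimal sketch of the documented Reader/Writer prototype, not taken from any of the projects below; the file names are hypothetical, and the java.io and org.apache.commons.io.IOUtils imports are assumed. Unlike IOUtils.copy, which returns an int, copyLarge keeps a long count, so the returned value stays meaningful for sources larger than 2GB. Most of the examples below use the equivalent InputStream/OutputStream overload for raw bytes.

// Minimal sketch: copy a large character file with the Reader/Writer overload.
// The file names are placeholders; copyLarge reads and writes in chunks internally.
try (Reader reader = new FileReader("large-input.txt");
        Writer writer = new FileWriter("large-output.txt")) {
    long charsCopied = IOUtils.copyLarge(reader, writer);
    System.out.println("Copied " + charsCopied + " characters");
}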

Usage

From source file:org.apache.kylin.rest.controller.BasicController.java

protected void setDownloadResponse(String downloadFile, final HttpServletResponse response) {
    File file = new File(downloadFile);
    try (InputStream fileInputStream = new FileInputStream(file);
            OutputStream output = response.getOutputStream();) {
        response.reset();
        response.setContentType("application/octet-stream");
        response.setContentLength((int) (file.length()));
        response.setHeader("Content-Disposition", "attachment; filename=\"" + file.getName() + "\"");
        IOUtils.copyLarge(fileInputStream, output);
        output.flush();
    } catch (IOException e) {
        throw new InternalErrorException("Failed to download file: " + e.getMessage(), e);
    }
}
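
This snippet uses the copyLarge(InputStream, OutputStream) overload, which copies in fixed-size chunks and returns the number of bytes written as a long. Note that response.setContentLength takes an int, so a file over 2GB would also need the Servlet 3.1 setContentLengthLong(long) method, even though copyLarge itself handles the copy. If finer control over the chunk size is wanted, commons-io 2.2 and later also accept a caller-supplied buffer; a hypothetical variant of the copy above (the 64KB size is arbitrary):

// Hypothetical variant: reuse one caller-supplied buffer instead of the library default.
byte[] buffer = new byte[64 * 1024];
long bytesSent = IOUtils.copyLarge(fileInputStream, output, buffer);
output.flush();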

From source file:org.apache.kylin.rest.controller.DiagnosisController.java

private void setDownloadResponse(String downloadFile, final HttpServletResponse response) {
    File file = new File(downloadFile);
    try (InputStream fileInputStream = new FileInputStream(file);
            OutputStream output = response.getOutputStream();) {
        response.reset();
        response.setContentType("application/octet-stream");
        response.setContentLength((int) (file.length()));
        response.setHeader("Content-Disposition", "attachment; filename=\"" + file.getName() + "\"");
        IOUtils.copyLarge(fileInputStream, output);
        output.flush();
    } catch (IOException e) {
        throw new InternalErrorException("Failed to create download for diagnosis. " + e.getMessage(), e);
    }
}

From source file:org.apache.maven.classpath.munger.validation.JarValidationUtilsTest.java

@Test
public void testSignatureOnSameJar() throws Exception {
    URL orgData = getClassContainerLocationURL(Assert.class);
    assertNotNull("Cannot find source URL", orgData);

    File cpyData = createTempFile(getCurrentTestName(), ".jar");
    try (InputStream input = orgData.openStream()) {
        try (OutputStream output = new FileOutputStream(cpyData)) {
            long cpySize = IOUtils.copyLarge(input, output);
            logger.info("Copy(" + orgData.toExternalForm() + ")[" + cpyData.getAbsolutePath() + "]: " + cpySize
                    + " bytes");
        }
    }

    NamedPropertySource expected = JarValidationUtils.createJarSignature(orgData);
    NamedPropertySource actual = JarValidationUtils.createJarSignature(cpyData);
    JarValidationUtils.validateJarSignature(expected, actual);

    if (logger.isDebugEnabled()) {
        for (String name : expected.getAvailableNames()) {
            String digestString = expected.getProperty(name);
            byte[] digestValue = DatatypeConverter.parseBase64Binary(digestString);
            logger.debug("    " + name + ": " + DatatypeConverter.printHexBinary(digestValue));
        }
    }
}

From source file:org.apache.nifi.processors.splunk.GetSplunk.java

@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    final long currentTime = System.currentTimeMillis();

    synchronized (isInitialized) {
        if (!isInitialized.get()) {
            splunkService = createSplunkService(context);
            isInitialized.set(true);
        }
    }

    final String query = context.getProperty(QUERY).getValue();
    final String outputMode = context.getProperty(OUTPUT_MODE).getValue();
    final String timeRangeStrategy = context.getProperty(TIME_RANGE_STRATEGY).getValue();
    final String timeZone = context.getProperty(TIME_ZONE).getValue();
    final String timeFieldStrategy = context.getProperty(TIME_FIELD_STRATEGY).getValue();

    final JobExportArgs exportArgs = new JobExportArgs();
    exportArgs.setSearchMode(JobExportArgs.SearchMode.NORMAL);
    exportArgs.setOutputMode(JobExportArgs.OutputMode.valueOf(outputMode));

    String earliestTime = null;
    String latestTime = null;

    if (PROVIDED_VALUE.getValue().equals(timeRangeStrategy)) {
        // for provided we just use the values of the properties
        earliestTime = context.getProperty(EARLIEST_TIME).getValue();
        latestTime = context.getProperty(LATEST_TIME).getValue();
    } else {
        try {
            // not provided so we need to check the previous state
            final TimeRange previousRange = loadState(context.getStateManager());
            final SimpleDateFormat dateFormat = new SimpleDateFormat(DATE_TIME_FORMAT);
            dateFormat.setTimeZone(TimeZone.getTimeZone(timeZone));

            if (previousRange == null) {
                // no previous state so set the earliest time based on the strategy
                if (MANAGED_CURRENT_VALUE.getValue().equals(timeRangeStrategy)) {
                    earliestTime = dateFormat.format(new Date(currentTime));
                }

                // no previous state so set the latest time to the current time
                latestTime = dateFormat.format(new Date(currentTime));

                // if it's the first time through, don't actually run; just save the state so the
                // initial time is recorded and the next execution will be the first real execution
                if (latestTime.equals(earliestTime)) {
                    saveState(context.getStateManager(), new TimeRange(earliestTime, latestTime));
                    return;
                }

            } else {
                // we have previous state so set earliestTime to (latestTime + 1) of last range
                try {
                    final String previousLastTime = previousRange.getLatestTime();
                    final Date previousLastDate = dateFormat.parse(previousLastTime);

                    earliestTime = dateFormat.format(new Date(previousLastDate.getTime() + 1));
                    latestTime = dateFormat.format(new Date(currentTime));
                } catch (ParseException e) {
                    throw new ProcessException(e);
                }
            }

        } catch (IOException e) {
            getLogger().error("Unable to load data from State Manager due to {}",
                    new Object[] { e.getMessage() }, e);
            context.yield();
            return;
        }
    }

    if (!StringUtils.isBlank(earliestTime)) {
        if (EVENT_TIME_VALUE.getValue().equalsIgnoreCase(timeFieldStrategy)) {
            exportArgs.setEarliestTime(earliestTime);
        } else {
            exportArgs.setIndexEarliest(earliestTime);
        }
    }

    if (!StringUtils.isBlank(latestTime)) {
        if (EVENT_TIME_VALUE.getValue().equalsIgnoreCase(timeFieldStrategy)) {
            exportArgs.setLatestTime(latestTime);
        } else {
            exportArgs.setIndexLatest(latestTime);
        }
    }

    if (EVENT_TIME_VALUE.getValue().equalsIgnoreCase(timeFieldStrategy)) {
        getLogger().debug("Using earliest_time of {} and latest_time of {}",
                new Object[] { earliestTime, latestTime });
    } else {
        getLogger().debug("Using index_earliest of {} and index_latest of {}",
                new Object[] { earliestTime, latestTime });
    }

    final InputStream exportSearch = splunkService.export(query, exportArgs);

    FlowFile flowFile = session.create();
    flowFile = session.write(flowFile, new OutputStreamCallback() {
        @Override
        public void process(OutputStream rawOut) throws IOException {
            try (BufferedOutputStream out = new BufferedOutputStream(rawOut)) {
                IOUtils.copyLarge(exportSearch, out);
            }
        }
    });

    final Map<String, String> attributes = new HashMap<>(3);
    attributes.put(EARLIEST_TIME_ATTR, earliestTime);
    attributes.put(LATEST_TIME_ATTR, latestTime);
    attributes.put(QUERY_ATTR, query);
    flowFile = session.putAllAttributes(flowFile, attributes);

    session.getProvenanceReporter().receive(flowFile, transitUri);
    session.transfer(flowFile, REL_SUCCESS);
    getLogger().debug("Received {} from Splunk", new Object[] { flowFile });

    // save the time range for the next execution to pick up where we left off
    // if saving fails then roll back the session so we can try again next execution
    // only need to do this for the managed time strategies
    if (!PROVIDED_VALUE.getValue().equals(timeRangeStrategy)) {
        try {
            saveState(context.getStateManager(), new TimeRange(earliestTime, latestTime));
        } catch (IOException e) {
            getLogger().error("Unable to save state to State Manager due to {}",
                    new Object[] { e.getMessage() }, e);
            session.rollback();
            context.yield();
        }
    }
}

From source file:org.apache.olingo.fit.utils.XMLElement.java

public void setContent(final InputStream content) throws IOException {
    this.content.reset();

    final InputStreamReader reader = new InputStreamReader(content, Constants.ENCODING);
    final OutputStreamWriter writer = new OutputStreamWriter(this.content, Constants.ENCODING);
    IOUtils.copyLarge(reader, writer);

    writer.flush();
    IOUtils.closeQuietly(reader);
    IOUtils.closeQuietly(writer);
    IOUtils.closeQuietly(content);
}
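
This is the only example on this page that matches the Reader/Writer prototype documented above. As a sketch only (reusing the same fields and Constants.ENCODING, not part of the Olingo source), the copy could also be written with try-with-resources, which closes and flushes both ends automatically and makes the explicit flush() and closeQuietly() calls unnecessary:

// Sketch of a try-with-resources variant of the copy in setContent above.
try (Reader reader = new InputStreamReader(content, Constants.ENCODING);
        Writer writer = new OutputStreamWriter(this.content, Constants.ENCODING)) {
    IOUtils.copyLarge(reader, writer);
}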

From source file:org.apache.padaf.preflight.ExtractStream.java

public static void main(String[] args) throws Exception {
    if (args.length != 3) {
        System.err.println("usage : ExtractStream file objNum objGen");
        return; // avoid indexing args that were not supplied
    }
    PDDocument document = PDDocument.load(new FileInputStream(args[0]));
    COSObject obj = document.getDocument()
            .getObjectFromPool(new COSObjectKey(Integer.parseInt(args[1]), Integer.parseInt(args[2])));
    if (obj.getObject() instanceof COSStream) {
        COSStream stream = (COSStream) obj.getObject();
        InputStream is = stream.getUnfilteredStream();
        FileOutputStream out = new FileOutputStream("stream.out");
        IOUtils.copyLarge(is, out);
        IOUtils.closeQuietly(out);
    }
}

From source file:org.apache.padaf.preflight.font.AbstractFontValidator.java

/**
 * Type0, Type1 and TrueType font validators call this method to check the
 * FontFile metadata.
 * 
 * @param fontDesc
 *          The FontDescriptor which contains the FontFile stream
 * @param fontFile
 *          The font file stream to check
 * @return true if the meta data is valid, false otherwise
 * @throws ValidationException when checking fails
 */
protected boolean checkFontFileMetaData(PDFontDescriptor fontDesc, PDStream fontFile)
        throws ValidationException {
    PDMetadata metadata = null;
    try {
        metadata = fontFile.getMetadata();
    } catch (IllegalStateException e) {
        fontContainer.addError(new ValidationError(ValidationConstants.ERROR_METADATA_FORMAT_UNKOWN,
                "The Metadata entry doesn't reference a stream object"));
        return false;
    }
    if (metadata != null) {
        // --- Filters are forbidden in a XMP stream
        if (metadata.getFilters() != null && !metadata.getFilters().isEmpty()) {
            fontContainer.addError(new ValidationError(ValidationConstants.ERROR_SYNTAX_STREAM_INVALID_FILTER,
                    "Filter specified in font file metadata dictionary"));
            return false;
        }

        // --- extract the meta data content
        byte[] mdAsBytes = null;
        try {
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            InputStream metaDataContent = metadata.createInputStream();
            IOUtils.copyLarge(metaDataContent, bos);
            IOUtils.closeQuietly(metaDataContent);
            IOUtils.closeQuietly(bos);
            mdAsBytes = bos.toByteArray();
        } catch (IOException e) {
            throw new ValidationException("Unable to read font metadata due to : " + e.getMessage(), e);
        }

        try {

            XMPDocumentBuilder xmpBuilder = new XMPDocumentBuilder();
            XMPMetadata xmpMeta = xmpBuilder.parse(mdAsBytes);

            FontMetaDataValidation fontMDval = new FontMetaDataValidation();
            List<ValidationError> ve = new ArrayList<ValidationError>();
            boolean isVal = fontMDval.analyseFontName(xmpMeta, fontDesc, ve);
            isVal = isVal & fontMDval.analyseRights(xmpMeta, fontDesc, ve);
            for (ValidationError validationError : ve) {
                fontContainer.addError(validationError);
            }
            return isVal;

        } catch (XmpUnknownValueTypeException e) {
            fontContainer.addError(
                    new ValidationError(ValidationConstants.ERROR_METADATA_UNKNOWN_VALUETYPE, e.getMessage()));
            return false;
        } catch (XmpParsingException e) {
            fontContainer
                    .addError(new ValidationError(ValidationConstants.ERROR_METADATA_FORMAT, e.getMessage()));
            return false;
        } catch (XmpSchemaException e) {
            fontContainer
                    .addError(new ValidationError(ValidationConstants.ERROR_METADATA_FORMAT, e.getMessage()));
            return false;
        } catch (XmpExpectedRdfAboutAttribute e) {
            fontContainer.addError(new ValidationError(
                    ValidationConstants.ERROR_METADATA_RDF_ABOUT_ATTRIBUTE_MISSING, e.getMessage()));
            return false;
        } catch (BadFieldValueException e) {
            fontContainer.addError(new ValidationError(
                    ValidationConstants.ERROR_METADATA_CATEGORY_PROPERTY_INVALID, e.getMessage()));
            return false;
        } catch (XmpXpacketEndException e) {
            throw new ValidationException("Unable to parse font metadata due to : " + e.getMessage(), e);
        }
    }

    // --- No MetaData, valid
    return true;
}

From source file:org.apache.padaf.xmpbox.parser.XMPDocumentBuilder.java

private byte[] getStreamAsByteArray(InputStream input) throws XmpParsingException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    try {
        IOUtils.copyLarge(input, bos);
    } catch (IOException e) {
        throw new XmpParsingException("An error has occurred when processing the underlying XMP source", e);
    } finally {
        IOUtils.closeQuietly(bos);
        IOUtils.closeQuietly(input);
    }
    return bos.toByteArray();
}

From source file:org.apache.pdfbox.preflight.font.descriptor.FontDescriptorHelper.java

protected final byte[] getMetaDataStreamAsBytes(PDMetadata metadata) {
    byte[] result = null;
    ByteArrayOutputStream bos = null;
    InputStream metaDataContent = null;
    try {
        bos = new ByteArrayOutputStream();
        metaDataContent = metadata.createInputStream();
        IOUtils.copyLarge(metaDataContent, bos);
        result = bos.toByteArray();
    } catch (IOException e) {
        this.fContainer.push(new ValidationError(ERROR_METADATA_FORMAT_STREAM,
                this.font.getName() + ": Unable to read font metadata due to : " + e.getMessage(), e));
    } finally {
        IOUtils.closeQuietly(metaDataContent);
        IOUtils.closeQuietly(bos);
    }
    return result;
}

From source file:org.apache.sentry.tests.e2e.solr.AbstractSolrSentryTestBase.java

/**
 * Make a raw http request to specific cluster node.  Node is of the format
 * host:port/context, i.e. "localhost:8983/solr"
 */
protected String makeHttpRequest(CloudSolrServer server, String node, String httpMethod, String path,
        byte[] content, String contentType) throws Exception {
    HttpClient httpClient = server.getLbServer().getHttpClient();
    URI uri = new URI("http://" + node + path);
    HttpRequestBase method = null;
    if ("GET".equals(httpMethod)) {
        method = new HttpGet(uri);
    } else if ("HEAD".equals(httpMethod)) {
        method = new HttpHead(uri);
    } else if ("POST".equals(httpMethod)) {
        method = new HttpPost(uri);
    } else if ("PUT".equals(httpMethod)) {
        method = new HttpPut(uri);
    } else {
        throw new IOException("Unsupported method: " + httpMethod);
    }

    if (method instanceof HttpEntityEnclosingRequestBase) {
        HttpEntityEnclosingRequestBase entityEnclosing = (HttpEntityEnclosingRequestBase) method;
        ByteArrayEntity entityRequest = new ByteArrayEntity(content);
        entityRequest.setContentType(contentType);
        entityEnclosing.setEntity(entityRequest);
    }

    HttpEntity httpEntity = null;
    boolean success = false;
    String retValue = "";
    try {
        final HttpResponse response = httpClient.execute(method);
        int httpStatus = response.getStatusLine().getStatusCode();
        httpEntity = response.getEntity();

        if (httpEntity != null) {
            InputStream is = httpEntity.getContent();
            ByteArrayOutputStream os = new ByteArrayOutputStream();
            try {
                IOUtils.copyLarge(is, os);
                os.flush();
            } finally {
                IOUtils.closeQuietly(os);
                IOUtils.closeQuietly(is);
            }
            retValue = os.toString();
        }
        success = true;
    } finally {
        if (!success) {
            EntityUtils.consumeQuietly(httpEntity);
            method.abort();
        }
    }
    return retValue;
}