Example usage for org.springframework.web.servlet.mvc.method.annotation StreamingResponseBody writeTo
Introduction

On this page you can find example usage for org.springframework.web.servlet.mvc.method.annotation StreamingResponseBody.writeTo.

Prototype

void writeTo(OutputStream outputStream) throws IOException;

Document

A callback for writing to the response body.
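
The writeTo callback is usually supplied as a lambda from a controller method. A minimal sketch (the controller and endpoint names are illustrative, not taken from the examples below):

import java.nio.charset.StandardCharsets;

import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.servlet.mvc.method.annotation.StreamingResponseBody;

@RestController
public class DownloadController {

    // Spring MVC invokes writeTo(OutputStream) asynchronously with the
    // response output stream after the request-handling thread has returned.
    @GetMapping("/download")
    public StreamingResponseBody download() {
        return outputStream -> {
            outputStream.write("streamed content".getBytes(StandardCharsets.UTF_8));
            outputStream.flush();
        };
    }
}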

Usage

From source file:org.talend.dataprep.command.CommandHelperTest.java

@Test
public void testCommandToStreamingWithNoHeader() throws Exception {
    HystrixCommand<InputStream> command = new CommandHelperTestCommand();

    final StreamingResponseBody responseBody = CommandHelper.toStreaming(command);
    final ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    responseBody.writeTo(outputStream);
    assertEquals("test", new String(outputStream.toByteArray()));
}
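
CommandHelper.toStreaming is Talend-internal and its source is not shown on this page; assuming it simply copies the InputStream produced by the Hystrix command into the response, a minimal equivalent adapter might look like this:

import java.io.InputStream;

import org.springframework.web.servlet.mvc.method.annotation.StreamingResponseBody;

import com.netflix.hystrix.HystrixCommand;

public final class StreamingAdapters {

    // Hypothetical adapter: writeTo(outputStream) executes the command and copies
    // its InputStream result into the response output stream.
    public static StreamingResponseBody toStreaming(HystrixCommand<InputStream> command) {
        return outputStream -> {
            try (InputStream in = command.execute()) {
                in.transferTo(outputStream); // Java 9+; replace with a copy loop on older JDKs
            }
        };
    }
}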

From source file:org.talend.dataprep.transformation.service.TransformationService.java

/**
 * Compute the given aggregation.
 *
 * @param rawParams the aggregation parameters, as raw JSON from the request body.
 */
// @formatter:off
@RequestMapping(value = "/aggregate", method = POST, produces = APPLICATION_JSON_VALUE, consumes = APPLICATION_JSON_VALUE)
@ApiOperation(value = "Compute the aggregation according to the request body rawParams", consumes = APPLICATION_JSON_VALUE)
@VolumeMetered
public AggregationResult aggregate(
        @ApiParam(value = "The aggregation rawParams in json") @RequestBody final String rawParams) {
    // @formatter:on

    // parse the aggregation parameters
    final AggregationParameters parameters;
    try {
        parameters = mapper.readerFor(AggregationParameters.class).readValue(rawParams);
        LOG.debug("Aggregation requested {}", parameters);
    } catch (IOException e) {
        throw new TDPException(CommonErrorCodes.BAD_AGGREGATION_PARAMETERS, e);
    }

    InputStream contentToAggregate;

    // get the content of the preparation (internal call with piped streams)
    if (StringUtils.isNotBlank(parameters.getPreparationId())) {
        try {
            PipedOutputStream temp = new PipedOutputStream();
            contentToAggregate = new PipedInputStream(temp);

            // because of piped streams, processing must be asynchronous
            Runnable r = () -> {
                try {
                    final ExportParameters exportParameters = new ExportParameters();
                    exportParameters.setPreparationId(parameters.getPreparationId());
                    exportParameters.setDatasetId(parameters.getDatasetId());
                    if (parameters.getFilter() != null) {
                        exportParameters.setFilter(mapper.readTree(parameters.getFilter()));
                    }
                    exportParameters.setExportType(JSON);
                    exportParameters.setStepId(parameters.getStepId());

                    final StreamingResponseBody body = executeSampleExportStrategy(exportParameters);
                    body.writeTo(temp);
                } catch (IOException e) {
                    throw new TDPException(CommonErrorCodes.UNABLE_TO_AGGREGATE, e);
                }
            };
            executor.execute(r);
        } catch (IOException e) {
            throw new TDPException(CommonErrorCodes.UNABLE_TO_AGGREGATE, e);
        }
    } else {
        final DataSetGet dataSetGet = context.getBean(DataSetGet.class, parameters.getDatasetId(), false, true);
        contentToAggregate = dataSetGet.execute();
    }

    // apply the aggregation
    try (JsonParser parser = mapper.getFactory().createParser(contentToAggregate)) {
        final DataSet dataSet = mapper.readerFor(DataSet.class).readValue(parser);
        return aggregationService.aggregate(parameters, dataSet);
    } catch (IOException e) {
        throw new TDPException(CommonErrorCodes.UNABLE_TO_PARSE_JSON, e);
    } finally {
        // don't forget to release the connection
        if (contentToAggregate != null) {
            try {
                contentToAggregate.close();
            } catch (IOException e) {
                LOG.warn("Could not close dataset input stream while aggregating", e);
            }
        }
    }
}
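
The piped-stream pattern above deadlocks if the writer and the reader share a thread, which is why the writeTo call is pushed onto an executor. A stripped-down sketch of the same idea, with placeholder content and outside any Talend class:

import java.io.IOException;
import java.io.InputStream;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.springframework.web.servlet.mvc.method.annotation.StreamingResponseBody;

public class PipedStreamingSketch {

    public static void main(String[] args) throws IOException {
        // Placeholder body; in the service above this comes from executeSampleExportStrategy.
        StreamingResponseBody body = outputStream ->
                outputStream.write("{\"records\":[]}".getBytes(StandardCharsets.UTF_8));

        PipedOutputStream pipeOut = new PipedOutputStream();
        InputStream contentToRead = new PipedInputStream(pipeOut);

        // writeTo must run on another thread: the pipe has a bounded buffer and the
        // reader blocks until data arrives, so a single thread would deadlock.
        ExecutorService executor = Executors.newSingleThreadExecutor();
        executor.execute(() -> {
            try {
                body.writeTo(pipeOut);
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            } finally {
                try {
                    pipeOut.close(); // closing signals end-of-stream to the reader
                } catch (IOException ignored) {
                    // nothing useful to do if closing the pipe fails
                }
            }
        });

        // Consumer side: read the streamed content as it is produced.
        try (InputStream in = contentToRead) {
            System.out.println(new String(in.readAllBytes(), StandardCharsets.UTF_8));
        }
        executor.shutdown();
    }
}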

From source file:org.talend.dataprep.transformation.service.TransformationService.java

/**
 * Add the given preparation to the cache.
 *
 * @param preparation the preparation to cache.
 * @param stepId the preparation step id.
 */
private void addPreparationInCache(Preparation preparation, String stepId) {
    final ExportParameters exportParameters = new ExportParameters();
    exportParameters.setPreparationId(preparation.getId());
    exportParameters.setExportType("JSON");
    exportParameters.setStepId(stepId);
    exportParameters.setDatasetId(preparation.getDataSetId());

    final StreamingResponseBody streamingResponseBody = executeSampleExportStrategy(exportParameters);
    try {
        // the written output does not matter here: executing the export populates the cache
        streamingResponseBody.writeTo(new NullOutputStream());
    } catch (IOException e) {
        throw new TDPException(UNEXPECTED_EXCEPTION, e);
    }
}
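
NullOutputStream is Apache Commons IO's discard-everything sink, used here only so that writeTo executes and the export strategy fills its cache as a side effect. On Java 11+ the same effect is available without the extra dependency, for example:

import java.io.IOException;
import java.io.OutputStream;

import org.springframework.web.servlet.mvc.method.annotation.StreamingResponseBody;

class CacheWarmingSketch {

    // Illustrative helper (not part of TransformationService): runs the body purely
    // for its side effects and throws the written bytes away.
    static void drain(StreamingResponseBody body) throws IOException {
        body.writeTo(OutputStream.nullOutputStream()); // discards every byte written
    }
}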