Example usage for org.springframework.http HttpHeaders setContentType

List of usage examples for org.springframework.http HttpHeaders setContentType

Introduction

On this page you can find example usage for org.springframework.http HttpHeaders setContentType.

Prototype

public void setContentType(@Nullable MediaType mediaType) 

Document

Set the media type of the body, as specified by the Content-Type header.
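
For quick reference, here is a minimal, self-contained sketch of the call; it is not taken from any of the projects below, and the class and method names are illustrative only. The headers object is populated with a media type and attached to the response, so the client receives a Content-Type: application/json header.

import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;

public class ContentTypeExample {

    // Build a JSON response whose Content-Type header is set explicitly via setContentType.
    public ResponseEntity<String> jsonResponse(String jsonBody) {
        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_JSON);
        return new ResponseEntity<>(jsonBody, headers, HttpStatus.OK);
    }
}

The examples below follow the same pattern, whether the headers end up on a ResponseEntity returned to the client or on an HttpEntity passed to a RestTemplate call.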

Usage

From source file:org.cloudfoundry.identity.uaa.integration.ClientAdminEndpointsIntegrationTests.java

public HttpHeaders getAuthenticatedHeaders(OAuth2AccessToken token) {
    HttpHeaders headers = new HttpHeaders();
    headers.setAccept(Arrays.asList(MediaType.APPLICATION_JSON));
    headers.setContentType(MediaType.APPLICATION_JSON);
    headers.set("Authorization", "Bearer " + token.getValue());
    return headers;
}

From source file:com.opopov.cloud.image.service.ImageStitchingServiceImpl.java

@Override
public DeferredResult<ResponseEntity<?>> getStitchedImage(@RequestBody ImageStitchingConfiguration config) {

    validator.validateConfig(config);

    List<ListenableFuture<ResponseEntity<byte[]>>> futures = config.getUrlList().stream()
            .map(url -> remoteResource.getForEntity(url, byte[].class)).collect(Collectors.toList());

    //wrap the listenable futures into the completable futures
    //writing loop in pre-8 style, since it would be more concise compared to stream api in this case
    CompletableFuture[] imageFutures = new CompletableFuture[futures.size()];
    int taskIndex = 0;
    IndexMap indexMap = new IndexMap(config.getRowCount() * config.getColumnCount());
    for (ListenableFuture<ResponseEntity<byte[]>> f : futures) {
        imageFutures[taskIndex] = imageDataFromResponse(taskIndex, indexMap, utils.fromListenableFuture(f));
        taskIndex++;
    }

    CompletableFuture<Void> allDownloadedAndDecompressed = CompletableFuture.allOf(imageFutures);

    //Synchronous part - start - writing decompressed bytes to the large image
    final int DOWNLOAD_AND_DECOMPRESS_TIMEOUT = 30; //30 seconds for each of the individual tasks
    DeferredResult<ResponseEntity<?>> response = new DeferredResult<>();
    boolean allSuccessful = false;
    byte[] imageBytes = null;
    HttpHeaders headers = new HttpHeaders();
    try {
        Void finishResult = allDownloadedAndDecompressed.get(DOWNLOAD_AND_DECOMPRESS_TIMEOUT, TimeUnit.SECONDS);

        imageBytes = combineImagesIntoStitchedImage(config, indexMap);

        // populate the response headers: disable caching and mark the body as a JPEG image
        headers.setCacheControl(CacheControl.noCache().getHeaderValue());
        headers.setContentType(MediaType.IMAGE_JPEG);
        allSuccessful = true;
    } catch (InterruptedException | ExecutionException e) {
        // basically either download or decompression of the source image failed
        // just skip it then, we have no image to show
        response.setErrorResult(
                new SourceImageLoadException("Unable to load and decode one or more source images", e));
    } catch (TimeoutException e) {
        //send timeout response, via ImageLoadTimeoutException
        response.setErrorResult(new ImageLoadTimeoutException(
                String.format("Some of the images were not loaded and decoded before timeout of %d seconds",
                        DOWNLOAD_AND_DECOMPRESS_TIMEOUT),
                e));
    } catch (IOException e) {
        response.setErrorResult(new ImageWriteException("Error writing image into output buffer", e));
    }

    //Synchronous part - end

    if (!allSuccessful) {
        //should not get here, some unknown error
        response.setErrorResult(new ImageLoadTimeoutException("Unknown error",
                new RuntimeException("Something went wrong")));

        return response;
    }

    // attach the prepared headers (Cache-Control, Content-Type) to the success response
    ResponseEntity<?> successResult = ResponseEntity.ok().headers(headers).body(imageBytes);
    response.setResult(successResult);

    return response;

}

From source file:org.n52.tamis.rest.forward.processes.execute.ExecuteRequestForwarder.java

/**
 * {@inheritDoc} <br/>
 * <br/>
 * Delegates an incoming execute request to the WPS proxy.
 * 
 * Has two possible return values depending on the type of execution
 * (synchronous or asynchronous)! See return description.
 * 
 * @param parameterValueStore
 *            must contain the URL variable
 *            {@link URL_Constants_TAMIS#SERVICE_ID_VARIABLE_NAME} to
 *            identify the WPS instance and
 *            {@link URL_Constants_TAMIS#PROCESS_ID_VARIABLE_NAME} to
 *            identify the process
 * 
 * @param requestBody
 *            must be an instance of {@link Execute_HttpPostBody}
 * @return either a String value representing the location header to the
 *         created job instance (in case of asynchronous execution)
 *         <b>OR</b> an instance of {@link ResultDocument} (in case of
 *         synchronous execution).
 * @throws IOException
 */
@Override
public Object forwardRequestToWpsProxy(HttpServletRequest request, Object requestBody,
        ParameterValueStore parameterValueStore) throws IOException {
    initializeRequestSpecificParameters(parameterValueStore);

    Execute_HttpPostBody executeBody = null;

    /*
     * requestBody must be an instance of Execute_HttpPostBody
     */
    if (requestBody instanceof Execute_HttpPostBody)
        executeBody = (Execute_HttpPostBody) requestBody;
    else
        logger.error(
                "Request body was expected to be an instance of \"{}\", but was \"{}\". NullPointerException might occur.",
                Execute_HttpPostBody.class, requestBody.getClass());

    // add process Id, since it is not included in the received execute
    // body, but is needed
    executeBody.setProcessId(this.getProcessId());

    sosRequestConstructor.constructSosGetObservationRequestsForInputs(executeBody);

    String execute_url_wpsProxy = createTargetURL_WpsProxy(request);

    /*
     * To guarantee the existence of the parameter "sync-execute" in the
     * request-object, the parameter has been added as an attribute to the
     * request.
     */
    boolean syncExecute_parameter = (boolean) request
            .getAttribute(ExecuteProcessController.SYNC_EXECUTE_PARAMETER_NAME);
    execute_url_wpsProxy = append_syncExecute_parameter(syncExecute_parameter, execute_url_wpsProxy);

    /*
     * forward execute request to WPS proxy.
     * 
     * depending on the request parameter "sync-execute" the call should be
     * realized asynchronously or synchronously.
     */
    URI createdJobUri_wpsProxy = null;

    /**
     * content headers!
     */
    HttpHeaders headers = new HttpHeaders();
    headers.setContentType(MediaType.APPLICATION_JSON);
    headers.setAccept(Arrays.asList(MediaType.APPLICATION_JSON));

    if (syncExecute_parameter) {
        // synchronous
        RestTemplate synchronousExecuteTemplate = new RestTemplate();

        /*
         * execute the POST request synchronously
         * 
         * the return value will be a result document of the newly created
         * resource. Thus, we have to extract the jobId from it to manually
         * build the location header
         * 
         */

        HttpEntity<Execute_HttpPostBody> requestEntity = new HttpEntity<>(executeBody, headers);

        ResultDocument resultDocument = synchronousExecuteTemplate.postForObject(execute_url_wpsProxy,
                requestEntity, ResultDocument.class);

        /*
         * the WPS is not designed to offer a StatusRequest against a job
         * that has been executed synchronously. Any jobID-URL pointing to a
         * synchronous job will fail (with a Bad Request or syntax error).
         * 
         * Hence, we simply return the ResultDocument.
         */

        return resultDocument;
    } else {

        /*
         * Proceed similarly to the synchronous case; the WPS proxy is simply
         * called with a different sync-execute parameter.
         * 
         * In contrast to the synchronous call, this branch receives and
         * returns the location header of the newly created job instance.
         */

        RestTemplate asynchronousExecuteTemplate = new RestTemplate();

        createdJobUri_wpsProxy = asynchronousExecuteTemplate.postForLocation(execute_url_wpsProxy, executeBody);

        /*
         * from the result of the execution request against the WPS proxy,
         * extract the location header and return it.
         * 
         * the location header points to a URL specific to the WPS proxy!
         * 
         * What we need is the URL pointing to THIS application's resource.
         * Hence, we must adapt the URL: basically, we extract the job ID and
         * append it to the standard URL path of THIS application.
         * 
         * createdJobUri_wpsProxy looks like "<baseUrl-wpsProxy>/processes/{processId}/jobs/{jobId}"
         */
        String jobId = createdJobUri_wpsProxy.getPath().split(URL_Constants_WpsProxy.SLASH_JOBS + "/")[1];

        /*
         * target job URL should look like: "<base-url-tamis>/services/{serviceId}/processes/{processId}/jobs/{jobId}"
         */

        String createdJobUrl = request.getRequestURL().toString();
        createdJobUrl = createdJobUrl + URL_Constants_TAMIS.SLASH_JOBS + "/" + jobId;

        return createdJobUrl;
    }
}

From source file:com.laishidua.mobilecloud.ghostmyselfie.controller.GhostMySelfieController.java

/**
 *
 * @param id       : the id of the ghostmyselfie to associate this data stream with
 * @param ghostMyselfieData : the data stream
 * @param response   : http response, exposed to allow manipulation like setting
 *                   error codes
 * @return         : a GhostMySelfieStatus object if successful, null otherwise
 * @throws IOException
 */
@RequestMapping(value = GhostMySelfieSvcApi.GHOSTMYSELFIE_DATA_PATH, method = RequestMethod.POST)
public ResponseEntity<GhostMySelfieStatus> setGhostMySelfieData(
        @PathVariable(GhostMySelfieSvcApi.ID_PARAMETER) long id,
        @RequestPart(GhostMySelfieSvcApi.DATA_PARAMETER) MultipartFile ghostMySelfieData,
        HttpServletResponse response, Principal p) throws IOException {
    HttpHeaders responseHeaders = new HttpHeaders();
    responseHeaders.setContentType(MediaType.APPLICATION_JSON);
    GhostMySelfie gms = ghostMySelphie.findOne(id);
    if (gms != null) {
        if (p.getName().equals(gms.getOwner())) {
            String mimeType = URLConnection.guessContentTypeFromStream(ghostMySelfieData.getInputStream());
            // constant-first equals avoids a NullPointerException when the type cannot be guessed
            if (!"image/jpeg".equals(mimeType) && !"image/png".equals(mimeType)) {
                response.sendError(400, "Just jpg, jpeg and png images supported");
                return null;
            }
            ghostMySelfieDataMgr = GhostMySelfieFileManager.get();
            saveSomeGhostMySelfie(gms, ghostMySelfieData);
            return new ResponseEntity<GhostMySelfieStatus>(
                    new GhostMySelfieStatus(GhostMySelfieStatus.GhostMySelfieState.READY), responseHeaders,
                    HttpStatus.CREATED);
        } else {
            response.sendError(400, "Not your Selfie, hands off!");
            return null;
        }
    } else {
        response.sendError(404, "Your Selfie is in another castle.");
        return null;
    }
}

From source file:com.orange.cepheus.cep.EventSinkListenerTest.java

/**
 * Check that an updateContext is fired when a new event bean arrives
 */
@Test
public void postMessageOnEventUpdate() {

    HttpHeaders httpHeaders = new HttpHeaders();
    httpHeaders.setContentType(MediaType.APPLICATION_JSON);
    httpHeaders.setAccept(Collections.singletonList(MediaType.APPLICATION_JSON));

    when(statement.getText()).thenReturn("statement");
    when(ngsiClient.getRequestHeaders(any())).thenReturn(httpHeaders);

    // Trigger event update
    List<ContextAttribute> attributes = new LinkedList<>();
    attributes.add(new ContextAttribute("id", "string", "OUT1234"));
    attributes.add(new ContextAttribute("avgTemp", "double", 10.25));
    attributes.add(new ContextAttribute("avgTemp_unit", "string", "celcius"));
    EventBean[] beans = { buildEventBean("TempSensorAvg", attributes) };
    eventSinkListener.update(beans, null, statement, provider);

    // Capture updateContext when postUpdateContextRequest is called on updateContextRequest,
    ArgumentCaptor<UpdateContext> updateContextArg = ArgumentCaptor.forClass(UpdateContext.class);
    ArgumentCaptor<HttpHeaders> headersArg = ArgumentCaptor.forClass(HttpHeaders.class);

    verify(ngsiClient).updateContext(eq(broker.getUrl()), headersArg.capture(), updateContextArg.capture());

    // Check updateContext is valid
    UpdateContext updateContext = updateContextArg.getValue();
    assertEquals(UpdateAction.APPEND, updateContext.getUpdateAction());
    assertEquals(1, updateContext.getContextElements().size());

    // Check headers are valid
    HttpHeaders headers = headersArg.getValue();
    assertEquals(MediaType.APPLICATION_JSON, headers.getContentType());
    assertTrue(headers.getAccept().contains(MediaType.APPLICATION_JSON));
    assertEquals("SN", headers.getFirst("Fiware-Service"));
    assertEquals("SP", headers.getFirst("Fiware-ServicePath"));
    assertEquals("AUTH_TOKEN", headers.getFirst("X-Auth-Token"));

    ContextElement contextElement = updateContext.getContextElements().get(0);
    assertEquals("OUT1234", contextElement.getEntityId().getId());
    assertEquals("TempSensorAvg", contextElement.getEntityId().getType());
    assertFalse(contextElement.getEntityId().getIsPattern());
    assertEquals(1, contextElement.getContextAttributeList().size());

    ContextAttribute attr = contextElement.getContextAttributeList().get(0);
    assertEquals("avgTemp", attr.getName());
    assertEquals("double", attr.getType());
    assertEquals(10.25, attr.getValue());
    assertEquals(1, attr.getMetadata().size());
    assertEquals("unit", attr.getMetadata().get(0).getName());
    assertEquals("string", attr.getMetadata().get(0).getType());
    assertEquals("celcius", attr.getMetadata().get(0).getValue());
}

From source file:gateway.controller.JobController.java

/**
 * Repeats a Job that has previously been submitted to Piazza. This will spawn a new Job with a new corresponding Id.
 * 
 * @see http://pz-swagger.stage.geointservices.io/#!/Job/put_job_jobId
 * 
 * @param jobId
 *            The Id of the Job to repeat.
 * @param user
 *            User information
 * @return Response containing the Id of the newly created Job, or appropriate error
 */
@RequestMapping(value = "/job/{jobId}", method = RequestMethod.POST, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseStatus(HttpStatus.CREATED)
@ApiOperation(value = "Repeat Job", notes = "Repeats a previously submitted Job. This will clone the original Job, and run it again with identical parameters, using the requesting users authentication in the new Job.", tags = "Job")
@ApiResponses(value = {
        @ApiResponse(code = 201, message = "A new Job Id that corresponds to the cloned Job in Piazza.", response = JobResponse.class),
        @ApiResponse(code = 400, message = "Bad Request", response = ErrorResponse.class),
        @ApiResponse(code = 401, message = "Unauthorized", response = ErrorResponse.class),
        @ApiResponse(code = 404, message = "Not Found", response = ErrorResponse.class),
        @ApiResponse(code = 500, message = "Internal Error", response = ErrorResponse.class) })
public ResponseEntity<PiazzaResponse> repeatJob(
        @ApiParam(value = "Id of the Job to Repeat", required = true) @PathVariable(value = "jobId") String jobId,
        Principal user) {
    try {
        // Log the request
        logger.log(
                String.format("User %s requested to Repeat Job %s", gatewayUtil.getPrincipalName(user), jobId),
                PiazzaLogger.INFO);
        // Create the Request Object from the input parameters
        PiazzaJobRequest request = new PiazzaJobRequest();
        request.createdBy = gatewayUtil.getPrincipalName(user);
        request.jobType = new RepeatJob(jobId);
        // Proxy the request to the Job Manager
        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_JSON);
        HttpEntity<PiazzaJobRequest> entity = new HttpEntity<PiazzaJobRequest>(request, headers);
        try {
            return new ResponseEntity<PiazzaResponse>(restTemplate
                    .postForEntity(String.format("%s/%s", JOBMANAGER_URL, "repeat"), entity, JobResponse.class)
                    .getBody(), HttpStatus.CREATED);
        } catch (HttpClientErrorException | HttpServerErrorException hee) {
            LOGGER.error("Error Repeating Job", hee);
            return new ResponseEntity<PiazzaResponse>(
                    gatewayUtil.getErrorResponse(hee.getResponseBodyAsString()), hee.getStatusCode());
        }
    } catch (Exception exception) {
        String error = String.format("Error Repeating Job Id %s: %s", jobId, exception.getMessage());
        LOGGER.error(error, exception);
        logger.log(error, PiazzaLogger.ERROR);
        return new ResponseEntity<PiazzaResponse>(new ErrorResponse(error, "Gateway"),
                HttpStatus.INTERNAL_SERVER_ERROR);
    }
}

From source file:org.apigw.authserver.web.controller.CertifiedClientsController.java

@RequestMapping(value = "/oauth/clients/{clientId}/icon", method = RequestMethod.GET)
public ResponseEntity<?> getClientIcon(@PathVariable String clientId) {
    log.debug("getClientIcon");
    log.debug("Trying to load client icon");
    CertifiedClientIcon icon = clientDetailsService.findClientIconByClientId(clientId);
    HttpHeaders headers = new HttpHeaders();
    if (icon == null) {
        headers.setContentType(MediaType.TEXT_PLAIN);
        log.debug("getClientIcon: no icon found");
        return new ResponseEntity<String>("No icon found for client with clientId: " + clientId, headers,
                HttpStatus.NOT_FOUND);
    } else {
        headers.setContentType(MediaType.valueOf(icon.getContentType()));
        log.debug("getClientIcon: returning with icon");
        return new ResponseEntity<byte[]>(icon.getIcon(), headers, HttpStatus.OK);
    }
}

From source file:de.tudarmstadt.ukp.clarin.webanno.crowdflower.CrowdClient.java

/**
 * Upload the data vector as JSON to the specified Crowdflower job
 * @param job
 * @param data - Generic vector of data, containing ordinary java classes.
 * They should be annotated so that Jackson understands how to map them to JSON.
 *
 */

void uploadData(CrowdJob job, List<?> data) {
    Log LOG = LogFactory.getLog(getClass());

    //Crowdflower wants a Multi-line JSON, with each line having a new JSON object
    //Thus we have to map each (raw) object in data individually to a JSON string

    ObjectMapper mapper = new ObjectMapper();
    String jsonObjectCollection = "";

    StringBuilder jsonStringBuilder = new StringBuilder();
    int count = 0;
    for (Object obj : data) {
        count++;
        JsonNode jsonData = mapper.convertValue(obj, JsonNode.class);
        jsonStringBuilder.append(jsonData.toString());
        jsonStringBuilder.append("\n");
    }

    jsonObjectCollection = jsonStringBuilder.toString();

    RestTemplate restTemplate = new RestTemplate();
    restTemplate.getMessageConverters().add(new StringHttpMessageConverter());

    HttpHeaders headers = new HttpHeaders();
    headers.setContentType(MediaType.APPLICATION_JSON);
    HttpEntity<String> request = new HttpEntity<String>(jsonObjectCollection, headers);

    String result = "";

    if (job == null) {
        LOG.info("Upload new data and create new job: " + String.valueOf(count) + " data items");
        result = restTemplate.postForObject(uploadDataURL, request, String.class, apiKey);
    } else {
        LOG.info("Uploading new data to job: " + job.getId() + ": " + String.valueOf(count) + " data items");
        result = restTemplate.postForObject(uploadDataWithJobURL, request, String.class, job.getId(), apiKey);
    }
    LOG.info("Upload response:" + result);

    //set gold? this is what i would like to do...
    //updateVariable(job, "https://api.crowdflower.com/v1/jobs/{jobid}/gold?key={apiKey}", "set_standard_gold", "TRUE");
}

From source file:business.services.FileService.java

public HttpEntity<InputStreamResource> downloadAccessLog(String filename,
        boolean writeContentDispositionHeader) {
    try {
        FileSystem fileSystem = FileSystems.getDefault();
        Path path = fileSystem.getPath(accessLogsPath).normalize();
        filename = filename.replace(fileSystem.getSeparator(), "_");
        filename = URLEncoder.encode(filename, "utf-8");

        Path f = fileSystem.getPath(accessLogsPath, filename).normalize();
        // filter path names that point to places outside the logs path.
        // E.g., to prevent that in cases where clients use '../' in the filename
        // arbitrary locations are reachable.
        if (!Files.isSameFile(path, f.getParent())) {
            // Path f is not in the upload path. Maybe 'name' contains '..'?
            log.error("Invalid filename: " + filename);
            throw new FileDownloadError("Invalid file name");
        }
        if (!Files.isReadable(f)) {
            log.error("File does not exist: " + filename);
            throw new FileDownloadError("File does not exist");
        }

        InputStream input = new FileInputStream(f.toFile());
        InputStreamResource resource = new InputStreamResource(input);
        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.TEXT_PLAIN);
        if (writeContentDispositionHeader) {
            headers.set("Content-Disposition", "attachment; filename=" + filename.replace(" ", "_"));
        }
        HttpEntity<InputStreamResource> response = new HttpEntity<InputStreamResource>(resource, headers);
        return response;
    } catch (IOException e) {
        log.error(e);
        throw new FileDownloadError();
    }
}

From source file:gateway.controller.JobController.java

/**
 * Cancels a running Job, specified by its Job Id.
 * 
 * @see http://pz-swagger.stage.geointservices.io/#!/Job/delete_job_jobId
 * 
 * @param jobId
 *            The Id of the Job to delete.
 * @param user
 *            User information
 * @return No response body if successful, or an appropriate Error
 */
@RequestMapping(value = "/job/{jobId}", method = RequestMethod.DELETE, produces = MediaType.APPLICATION_JSON_VALUE)
@ApiOperation(value = "Abort Job", notes = "Requests a Running Job to be cancelled. If the Job is already completed in some way, then cancellation will not occur.", tags = "Job")
@ApiResponses(value = {
        @ApiResponse(code = 200, message = "The Job has requested to be cancelled. This may take some time, as the process may not be in an easily cancelled state at the time the request is made.", response = SuccessResponse.class),
        @ApiResponse(code = 401, message = "Unauthorized", response = ErrorResponse.class),
        @ApiResponse(code = 404, message = "Not Found", response = ErrorResponse.class),
        @ApiResponse(code = 500, message = "Internal Error", response = JobErrorResponse.class) })
public ResponseEntity<PiazzaResponse> abortJob(
        @ApiParam(value = "Id of the Job to cancel.", required = true) @PathVariable(value = "jobId") String jobId,
        @ApiParam(value = "Details for the cancellation of the Job.") @RequestParam(value = "reason", required = false) String reason,
        Principal user) {
    try {
        // Log the request
        logger.log(String.format("User %s requested Job Abort for Job Id %s with reason %s",
                gatewayUtil.getPrincipalName(user), jobId, reason), PiazzaLogger.INFO);

        // Create the Request object.
        PiazzaJobRequest request = new PiazzaJobRequest();
        request.createdBy = gatewayUtil.getPrincipalName(user);
        request.jobType = new AbortJob(jobId, reason);

        // Send the message through Kafka to delete the Job. This message
        // will get picked up by whatever component is running the Job.
        ProducerRecord<String, String> abortMessage = JobMessageFactory.getAbortJobMessage(request,
                gatewayUtil.getUuid(), SPACE);
        gatewayUtil.sendKafkaMessage(abortMessage);

        // Proxy the request to the Job Manager, where the Job Table will be
        // updated.
        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_JSON);
        HttpEntity<PiazzaJobRequest> entity = new HttpEntity<PiazzaJobRequest>(request, headers);
        try {
            return new ResponseEntity<PiazzaResponse>(
                    restTemplate.postForEntity(String.format("%s/%s", JOBMANAGER_URL, "abort"), entity,
                            SuccessResponse.class).getBody(),
                    HttpStatus.OK);
        } catch (HttpClientErrorException | HttpServerErrorException hee) {
            LOGGER.error("Error Requesting Job Cancellation", hee);
            return new ResponseEntity<PiazzaResponse>(
                    gatewayUtil.getErrorResponse(hee.getResponseBodyAsString()), hee.getStatusCode());
        }
    } catch (Exception exception) {
        String error = String.format("Error requesting Job Abort for Id %s: %s", jobId, exception.getMessage());
        LOGGER.error(error, exception);
        logger.log(error, PiazzaLogger.ERROR);
        return new ResponseEntity<PiazzaResponse>(new JobErrorResponse(jobId, error, "Gateway"),
                HttpStatus.INTERNAL_SERVER_ERROR);
    }
}