List of usage examples for the io.netty.handler.codec.http.DefaultHttpContent constructor
public DefaultHttpContent(ByteBuf content)
From source file:com.addthis.hydra.query.tracker.DetailedStatusHandler.java
License:Apache License
/**
 * Renders {@code queryEntryInfo} as a JSON response body and writes the full
 * HTTP response (headers, content, last-content marker), closing the channel
 * when the request is not keep-alive. Any failure during encoding or writing
 * is forwarded to {@link #onFailure}.
 */
private void onSuccess(QueryEntryInfo queryEntryInfo) {
    try {
        writer.write(CodecJSON.encodeJSON(queryEntryInfo).toString());
        ByteBuf body = ByteBufUtil.encodeString(ctx.alloc(),
                CharBuffer.wrap(writer.getBuilder()), CharsetUtil.UTF_8);
        response.headers().set(HttpHeaders.Names.CONTENT_LENGTH, body.readableBytes());
        boolean keepAlive = HttpHeaders.isKeepAlive(request);
        if (keepAlive) {
            response.headers().set(HttpHeaders.Names.CONNECTION, HttpHeaders.Values.KEEP_ALIVE);
        }
        ctx.write(response);
        ctx.write(new DefaultHttpContent(body));
        ChannelFuture last = ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT);
        if (!keepAlive) {
            last.addListener(ChannelFutureListener.CLOSE);
        }
    } catch (Throwable t) {
        // intentionally broad: every failure is reported through the error path
        onFailure(t);
    }
}
From source file:com.addthis.hydra.query.web.AbstractBufferingHttpBundleEncoder.java
License:Apache License
/**
 * If any buffered output is pending, encodes it into a ByteBuf, resets the
 * buffer for reuse, and flushes the bytes downstream as an HTTP content chunk.
 * A void promise is used since no completion callback is needed.
 */
protected void flushStringBuilder(ChannelHandlerContext ctx) {
    if (sendBuffer.length() == 0) {
        return; // nothing buffered, nothing to flush
    }
    ByteBuf encoded = encodeString(ctx.alloc(), sendBuffer);
    sendBuffer.setLength(0);
    ctx.writeAndFlush(new DefaultHttpContent(encoded), ctx.voidPromise());
}
From source file:com.addthis.hydra.query.web.DetailedStatusHandler.java
License:Apache License
private void onSuccess(QueryEntryInfo queryEntryInfo) throws Exception { JSONObject entryJSON = CodecJSON.encodeJSON(queryEntryInfo); writer.write(entryJSON.toString());//from w w w . j av a2 s . c o m ByteBuf textResponse = ByteBufUtil.encodeString(ctx.alloc(), CharBuffer.wrap(writer.getBuilder()), CharsetUtil.UTF_8); HttpContent content = new DefaultHttpContent(textResponse); response.headers().set(HttpHeaders.Names.CONTENT_LENGTH, textResponse.readableBytes()); if (HttpHeaders.isKeepAlive(request)) { response.headers().set(HttpHeaders.Names.CONNECTION, HttpHeaders.Values.KEEP_ALIVE); } ctx.write(response); ctx.write(content); ChannelFuture lastContentFuture = ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT); if (!HttpHeaders.isKeepAlive(request)) { lastContentFuture.addListener(ChannelFutureListener.CLOSE); } }
From source file:com.addthis.hydra.query.web.GoogleDriveAuthentication.java
License:Apache License
/** * Send an HTML formatted error message. *///from w w w . j a v a2 s . c o m private static void sendErrorMessage(ChannelHandlerContext ctx, String message) throws IOException { HttpResponse response = new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK); response.headers().set(CONTENT_TYPE, "text/html; charset=utf-8"); StringBuilderWriter writer = new StringBuilderWriter(50); writer.append("<html><head><title>Hydra Query Master</title></head><body>"); writer.append("<h3>"); writer.append(message); writer.append("</h3></body></html>"); ByteBuf textResponse = ByteBufUtil.encodeString(ctx.alloc(), CharBuffer.wrap(writer.getBuilder()), CharsetUtil.UTF_8); HttpContent content = new DefaultHttpContent(textResponse); response.headers().set(HttpHeaders.Names.CONTENT_LENGTH, textResponse.readableBytes()); ctx.write(response); ctx.write(content); ChannelFuture lastContentFuture = ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT); lastContentFuture.addListener(ChannelFutureListener.CLOSE); }
From source file:com.addthis.hydra.query.web.HttpQueryHandler.java
License:Apache License
private void fastHandle(ChannelHandlerContext ctx, FullHttpRequest request, String target, KVPairs kv) throws Exception { StringBuilderWriter writer = new StringBuilderWriter(50); HttpResponse response = HttpUtils.startResponse(writer); response.headers().add("Access-Control-Allow-Origin", "*"); switch (target) { case "/metrics": fakeMetricsServlet.writeMetrics(writer, kv); break;/* w ww . j av a 2 s . c o m*/ case "/query/list": writer.write("[\n"); for (QueryEntryInfo stat : tracker.getRunning()) { writer.write(CodecJSON.encodeString(stat).concat(",\n")); } writer.write("]"); break; case "/completed/list": writer.write("[\n"); for (QueryEntryInfo stat : tracker.getCompleted()) { writer.write(CodecJSON.encodeString(stat).concat(",\n")); } writer.write("]"); break; case "/v2/host/list": case "/host/list": String queryStatusUuid = kv.getValue("uuid"); QueryEntry queryEntry = tracker.getQueryEntry(queryStatusUuid); if (queryEntry != null) { DetailedStatusHandler hostDetailsHandler = new DetailedStatusHandler(writer, response, ctx, request, queryEntry); hostDetailsHandler.handle(); return; } else { QueryEntryInfo queryEntryInfo = tracker.getCompletedQueryInfo(queryStatusUuid); if (queryEntryInfo != null) { JSONObject entryJSON = CodecJSON.encodeJSON(queryEntryInfo); writer.write(entryJSON.toString()); } else { throw new RuntimeException("could not find query"); } break; } case "/query/cancel": if (tracker.cancelRunning(kv.getValue("uuid"))) { writer.write("canceled " + kv.getValue("uuid")); } else { writer.write("canceled failed for " + kv.getValue("uuid")); response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR); } break; case "/query/encode": { Query q = new Query(null, kv.getValue("query", kv.getValue("path", "")), null); JSONArray path = CodecJSON.encodeJSON(q).getJSONArray("path"); writer.write(path.toString()); break; } case "/query/decode": { String qo = "{path:" + kv.getValue("query", kv.getValue("path", "")) + "}"; Query q = CodecJSON.decodeString(new 
Query(), qo); writer.write(q.getPaths()[0]); break; } case "/v2/queries/finished.list": { JSONArray runningEntries = new JSONArray(); for (QueryEntryInfo entryInfo : tracker.getCompleted()) { JSONObject entryJSON = CodecJSON.encodeJSON(entryInfo); //TODO: replace this with some high level summary entryJSON.put("hostInfoSet", ""); runningEntries.put(entryJSON); } writer.write(runningEntries.toString()); break; } case "/v2/queries/running.list": { JSONArray runningEntries = new JSONArray(); for (QueryEntryInfo entryInfo : tracker.getRunning()) { JSONObject entryJSON = CodecJSON.encodeJSON(entryInfo); //TODO: replace this with some high level summary entryJSON.put("hostInfoSet", ""); runningEntries.put(entryJSON); } writer.write(runningEntries.toString()); break; } case "/v2/queries/workers": { JSONObject jsonObject = new JSONObject(); for (WorkerData workerData : meshQueryMaster.worky().values()) { jsonObject.put(workerData.hostName, workerData.queryLeases.availablePermits()); } writer.write(jsonObject.toString()); break; } case "/v2/queries/list": JSONArray queries = new JSONArray(); for (QueryEntryInfo entryInfo : tracker.getCompleted()) { JSONObject entryJSON = CodecJSON.encodeJSON(entryInfo); entryJSON.put("state", 0); queries.put(entryJSON); } for (QueryEntryInfo entryInfo : tracker.getRunning()) { JSONObject entryJSON = CodecJSON.encodeJSON(entryInfo); entryJSON.put("state", 3); queries.put(entryJSON); } writer.write(queries.toString()); break; case "/v2/job/list": { StringWriter swriter = new StringWriter(); final JsonGenerator json = QueryServer.factory.createJsonGenerator(swriter); json.writeStartArray(); for (IJob job : meshQueryMaster.keepy().getJobs()) { if (job.getQueryConfig() != null && job.getQueryConfig().getCanQuery()) { List<JobTask> tasks = job.getCopyOfTasks(); String uuid = job.getId(); json.writeStartObject(); json.writeStringField("id", uuid); json.writeStringField("description", Optional.fromNullable(job.getDescription()).or("")); 
json.writeNumberField("state", job.getState().ordinal()); json.writeStringField("creator", job.getCreator()); json.writeNumberField("submitTime", Optional.fromNullable(job.getSubmitTime()).or(-1L)); json.writeNumberField("startTime", Optional.fromNullable(job.getStartTime()).or(-1L)); json.writeNumberField("endTime", Optional.fromNullable(job.getStartTime()).or(-1L)); json.writeNumberField("replicas", Optional.fromNullable(job.getReplicas()).or(0)); json.writeNumberField("backups", Optional.fromNullable(job.getBackups()).or(0)); json.writeNumberField("nodes", tasks.size()); json.writeEndObject(); } } json.writeEndArray(); json.close(); writer.write(swriter.toString()); break; } case "/v2/settings/git.properties": { StringWriter swriter = new StringWriter(); final JsonGenerator json = QueryServer.factory.createJsonGenerator(swriter); Properties gitProperties = new Properties(); json.writeStartObject(); try { InputStream in = queryServer.getClass().getResourceAsStream("/git.properties"); gitProperties.load(in); in.close(); json.writeStringField("commitIdAbbrev", gitProperties.getProperty("git.commit.id.abbrev")); json.writeStringField("commitUserEmail", gitProperties.getProperty("git.commit.user.email")); json.writeStringField("commitMessageFull", gitProperties.getProperty("git.commit.message.full")); json.writeStringField("commitId", gitProperties.getProperty("git.commit.id")); json.writeStringField("commitUserName", gitProperties.getProperty("git.commit.user.name")); json.writeStringField("buildUserName", gitProperties.getProperty("git.build.user.name")); json.writeStringField("commitIdDescribe", gitProperties.getProperty("git.commit.id.describe")); json.writeStringField("buildUserEmail", gitProperties.getProperty("git.build.user.email")); json.writeStringField("branch", gitProperties.getProperty("git.branch")); json.writeStringField("commitTime", gitProperties.getProperty("git.commit.time")); json.writeStringField("buildTime", 
gitProperties.getProperty("git.build.time")); } catch (Exception ex) { log.warn("Error loading git.properties, possibly jar was not compiled with maven."); } json.writeEndObject(); json.close(); writer.write(swriter.toString()); break; } default: // forward to static file server ctx.pipeline().addLast(staticFileHandler); request.retain(); ctx.fireChannelRead(request); return; // don't do text response clean up } log.trace("response being sent {}", writer); ByteBuf textResponse = ByteBufUtil.encodeString(ctx.alloc(), CharBuffer.wrap(writer.getBuilder()), CharsetUtil.UTF_8); HttpContent content = new DefaultHttpContent(textResponse); response.headers().set(HttpHeaders.Names.CONTENT_LENGTH, textResponse.readableBytes()); if (HttpHeaders.isKeepAlive(request)) { response.headers().set(HttpHeaders.Names.CONNECTION, HttpHeaders.Values.KEEP_ALIVE); } ctx.write(response); ctx.write(content); ChannelFuture lastContentFuture = ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT); log.trace("response pending"); if (!HttpHeaders.isKeepAlive(request)) { log.trace("Setting close listener"); lastContentFuture.addListener(ChannelFutureListener.CLOSE); } }
From source file:com.addthis.hydra.query.web.LegacyHandler.java
License:Apache License
public static Query handleQuery(Query query, KVPairs kv, HttpRequest request, ChannelHandlerContext ctx) throws IOException, QueryException { String async = kv.getValue("async"); if (async == null) { return query; } else if (async.equals("new")) { StringBuilderWriter writer = new StringBuilderWriter(50); HttpResponse response = HttpUtils.startResponse(writer); String asyncUuid = genAsyncUuid(); asyncCache.put(asyncUuid, query); if (query.isTraced()) { Query.emitTrace("async create " + asyncUuid + " from " + query); }//from ww w. j ava2 s .c o m writer.write("{\"id\":\"" + asyncUuid + "\"}"); ByteBuf textResponse = ByteBufUtil.encodeString(ctx.alloc(), CharBuffer.wrap(writer.getBuilder()), CharsetUtil.UTF_8); HttpContent content = new DefaultHttpContent(textResponse); response.headers().set(HttpHeaders.Names.CONTENT_LENGTH, textResponse.readableBytes()); if (HttpHeaders.isKeepAlive(request)) { response.headers().set(HttpHeaders.Names.CONNECTION, HttpHeaders.Values.KEEP_ALIVE); } ctx.write(response); ctx.write(content); ChannelFuture lastContentFuture = ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT); if (!HttpHeaders.isKeepAlive(request)) { lastContentFuture.addListener(ChannelFutureListener.CLOSE); } return null; } else { Query asyncQuery = asyncCache.getIfPresent(async); asyncCache.invalidate(async); if (query.isTraced()) { Query.emitTrace("async restore " + async + " as " + asyncQuery); } if (asyncQuery != null) { return asyncQuery; } else { throw new QueryException("Missing Async Id"); } } }
From source file:com.aerofs.baseline.http.EntityOutputStream.java
License:Apache License
private void writeChunk() { Preconditions.checkNotNull(chunk, "null chunk prior to write"); ByteBuf forwarded = chunk;// w w w.j a v a 2 s. c om chunk = null; ctx.write(new DefaultHttpContent(forwarded)); // pass ownership to next handler }
From source file:com.bloom.zerofs.rest.NettyMultipartRequest.java
License:Open Source License
/** * Processes a single decoded part in a multipart request. Exposes the data in the part either through the channel * itself (if it is the blob part) or via {@link #getArgs()}. * @param part the {@link InterfaceHttpData} that needs to be processed. * @throws RestServiceException if the request channel is closed, if there is more than one part of the same name, if * the size obtained from the headers does not match the actual size of the blob part or * if {@code part} is not of the expected type ({@link FileUpload}). */// w ww. jav a 2s.c o m private void processPart(InterfaceHttpData part) throws RestServiceException { if (part.getHttpDataType() == InterfaceHttpData.HttpDataType.FileUpload) { FileUpload fileUpload = (FileUpload) part; if (fileUpload.getName().equals(RestUtils.MultipartPost.BLOB_PART)) { // this is actual data. if (hasBlob) { nettyMetrics.repeatedPartsError.inc(); throw new RestServiceException("Request has more than one " + RestUtils.MultipartPost.BLOB_PART, RestServiceErrorCode.BadRequest); } else { hasBlob = true; if (fileUpload.length() != getSize()) { nettyMetrics.multipartRequestSizeMismatchError.inc(); throw new RestServiceException("Request size [" + fileUpload.length() + "] does not match Content-Length [" + getSize() + "]", RestServiceErrorCode.BadRequest); } else { contentLock.lock(); try { if (isOpen()) { requestContents.add( new DefaultHttpContent(ReferenceCountUtil.retain(fileUpload.content()))); } else { nettyMetrics.multipartRequestAlreadyClosedError.inc(); throw new RestServiceException("Request is closed", RestServiceErrorCode.RequestChannelClosed); } } finally { contentLock.unlock(); } } } } else { // this is any kind of data. (For Amber, this will be user metadata). // TODO: find a configurable way of rejecting unexpected file parts. 
String name = fileUpload.getName(); if (allArgs.containsKey(name)) { nettyMetrics.repeatedPartsError.inc(); throw new RestServiceException("Request already has a component named " + name, RestServiceErrorCode.BadRequest); } else { ByteBuffer buffer = ByteBuffer.allocate(fileUpload.content().readableBytes()); // TODO: Possible optimization - Upgrade ByteBufferReadableStreamChannel to take a list of ByteBuffer. This // TODO: will avoid the copy. fileUpload.content().readBytes(buffer); buffer.flip(); allArgs.put(name, buffer); } } } else { nettyMetrics.unsupportedPartError.inc(); throw new RestServiceException("Unexpected HTTP data", RestServiceErrorCode.BadRequest); } }
From source file:com.couchbase.client.core.endpoint.config.ConfigHandlerTest.java
License:Apache License
/**
 * A 200 OK header followed by two body chunks ("foo" then "bar") should be
 * decoded into one successful BucketConfigResponse whose config is the
 * concatenated body, leaving the request queue drained.
 */
@Test
public void shouldDecodeSuccessBucketConfigResponse() throws Exception {
    HttpResponse header = new DefaultHttpResponse(HttpVersion.HTTP_1_1, new HttpResponseStatus(200, "OK"));
    HttpContent firstChunk = new DefaultHttpContent(Unpooled.copiedBuffer("foo", CHARSET));
    HttpContent lastChunk = new DefaultLastHttpContent(Unpooled.copiedBuffer("bar", CHARSET));
    BucketConfigRequest requestMock = mock(BucketConfigRequest.class);
    requestQueue.add(requestMock);

    channel.writeInbound(header, firstChunk, lastChunk);
    channel.readInbound();

    assertEquals(1, eventSink.responseEvents().size());
    BucketConfigResponse event = (BucketConfigResponse) eventSink.responseEvents().get(0).getMessage();
    assertEquals(ResponseStatus.SUCCESS, event.status());
    assertEquals("foobar", event.config());
    assertTrue(requestQueue.isEmpty());
}
From source file:com.couchbase.client.core.endpoint.config.ConfigHandlerTest.java
License:Apache License
@Test public void shouldDecodeListDesignDocumentsResponse() throws Exception { HttpResponse responseHeader = new DefaultHttpResponse(HttpVersion.HTTP_1_1, new HttpResponseStatus(200, "OK")); HttpContent responseChunk1 = new DefaultHttpContent(Unpooled.copiedBuffer("foo", CharsetUtil.UTF_8)); HttpContent responseChunk2 = new DefaultLastHttpContent(Unpooled.copiedBuffer("bar", CharsetUtil.UTF_8)); GetDesignDocumentsRequest requestMock = mock(GetDesignDocumentsRequest.class); requestQueue.add(requestMock);// w w w. j a v a 2s . c o m channel.writeInbound(responseHeader, responseChunk1, responseChunk2); assertEquals(1, eventSink.responseEvents().size()); GetDesignDocumentsResponse event = (GetDesignDocumentsResponse) eventSink.responseEvents().get(0) .getMessage(); assertEquals(ResponseStatus.SUCCESS, event.status()); assertEquals("foobar", event.content()); assertTrue(requestQueue.isEmpty()); }