List of usage examples for io.vertx.core.MultiMap.get(String)
@Nullable String get(String name);
From source file:org.sfs.elasticsearch.container.ListObjects.java
License:Apache License
@Override
public Observable<ObjectList> call(PersistentContainer container) {
    // Read pagination and filtering options from the request query parameters
    MultiMap queryParams = sfsRequest.params();
    Elasticsearch elasticSearch = vertxContext.verticle().elasticsearch();
    final String limit = queryParams.get(LIMIT);
    String marker = unescape(queryParams.get(MARKER));
    String endMarker = unescape(queryParams.get(END_MARKER));
    final String prefix = unescape(queryParams.get(PREFIX));
    final String delimiter = unescape(queryParams.get(DELIMITER));
    Integer parsedLimit = !isNullOrEmpty(limit) ? tryParse(limit) : valueOf(10000);
    parsedLimit = parsedLimit == null || parsedLimit < 0 || parsedLimit > 10000 ? 10000 : parsedLimit;
    String containerId = container.getId();
    String containerPrefix = containerId + ObjectPath.DELIMITER;
    if (!isNullOrEmpty(prefix)) {
        containerPrefix += prefix;
    }
    String objectIndex = elasticSearch.objectIndex(container.getName());
    final SearchRequestBuilder scrollRequest = elasticSearch.get().prepareSearch(objectIndex)
            .setTypes(elasticSearch.defaultType())
            .addSort(DOC_FIELD_NAME, ASC)
            .setScroll(timeValueMillis(elasticSearch.getDefaultScrollTimeout()))
            .setTimeout(timeValueMillis(elasticSearch.getDefaultSearchTimeout() - 10))
            .setQuery(prefixQuery("_id", containerPrefix))
            .setSize(100);
    final Integer finalParsedLimit = parsedLimit;
    final NavigableMap<String, ListedObject> listedObjects = new TreeMap<>();
    return scan(container, prefix, delimiter, marker, endMarker, finalParsedLimit, elasticSearch,
            scrollRequest, listedObjects)
            .map(aVoid -> new ObjectList(container, listedObjects.values()))
            .onErrorResumeNext(throwable -> {
                if (containsException(IndexNotFoundException.class, throwable)) {
                    return just(new ObjectList(container, emptyList()));
                } else {
                    return error(throwable);
                }
            });
}
From source file:org.sfs.filesystem.volume.DigestBlob.java
License:Apache License
public DigestBlob(HttpClientResponse httpClientResponse) {
    super(httpClientResponse);
    digests = new HashMap<>();
    BaseEncoding baseEncoding = base64();
    MultiMap headers = httpClientResponse.headers();
    // Iterate over the response header names and look up each matching value with get(name)
    for (String headerName : headers.names()) {
        Matcher matcher = COMPUTED_DIGEST.matcher(headerName);
        if (matcher.find()) {
            String digestName = matcher.group(1);
            Optional<MessageDigestFactory> oMessageDigestFactory = fromValueIfExists(digestName);
            if (oMessageDigestFactory.isPresent()) {
                MessageDigestFactory messageDigestFactory = oMessageDigestFactory.get();
                withDigest(messageDigestFactory, baseEncoding.decode(headers.get(headerName)));
            }
        }
    }
}
From source file:org.sfs.filesystem.volume.HeaderBlob.java
License:Apache License
public HeaderBlob(HttpClientResponse httpClientResponse) {
    MultiMap headers = httpClientResponse.headers();
    this.volume = headers.get(X_CONTENT_VOLUME);
    this.position = parseLong(headers.get(X_CONTENT_POSITION));
    this.length = parseLong(headers.get(X_CONTENT_LENGTH));
}
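HeaderBlob passes the header values straight to parseLong, so a response missing X_CONTENT_POSITION or X_CONTENT_LENGTH would fail with a NumberFormatException, since get returns null for absent names. A defensive variant is sketched below; the longHeader helper and the literal header names are illustrative, not part of the project:

import io.vertx.core.MultiMap;

final class HeaderParseSketch {
    // Hypothetical helper: parse a numeric header, falling back when it is absent
    static long longHeader(MultiMap headers, String name, long fallback) {
        String value = headers.get(name); // null when the header is missing
        return value != null ? Long.parseLong(value) : fallback;
    }

    public static void main(String[] args) {
        MultiMap headers = MultiMap.caseInsensitiveMultiMap()
                .set("X-Content-Position", "1024");
        System.out.println(longHeader(headers, "X-Content-Position", -1)); // 1024
        System.out.println(longHeader(headers, "X-Content-Length", -1));   // -1 (absent)
    }
}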
From source file:org.sfs.integration.java.func.AssertObjectHeaders.java
License:Apache License
@Override
public HttpClientResponse call(HttpClientResponse httpClientResponse) {
    out.println("Assert #" + assertIndex);
    MultiMap headers = httpClientResponse.headers();
    String etag = headers.get(ETAG);
    String contentMd5 = headers.get(CONTENT_MD5);
    String contentSha512 = headers.get(X_CONTENT_SHA512);
    String versionId = headers.get(X_CONTENT_VERSION);
    String contentLength = headers.get(CONTENT_LENGTH);
    String acceptRanges = headers.get(ACCEPT_RANGES);
    String lastModified = headers.get(LAST_MODIFIED);
    String date = headers.get(DATE);
    String serverSideEncryption = headers.get(X_SERVER_SIDE_ENCRYPTION);
    assertEquals(context, base16().lowerCase().encode(dataMd5), etag);
    assertEquals(context, base64().encode(dataMd5), contentMd5);
    assertEquals(context, base64().encode(dataSha512), contentSha512);
    assertEquals(context, expectedVersion, parseLong(versionId));
    //VertxAssert.assertEquals(context, expectedContentLength, Long.parseLong(contentLength));
    assertEquals(context, "none", acceptRanges);
    assertNotNull(context, lastModified);
    assertNotNull(context, date);
    assertEquals(context, this.serverSideEncryption, parseBoolean(serverSideEncryption));
    return httpClientResponse;
}
From source file:org.sfs.jobs.JobParams.java
License:Apache License
public static String getFirstOptionalParam(MultiMap params, String name) {
    return params.get(name);
}
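Since get returns null when the parameter is absent, callers of a helper like this usually supply a fallback. A hypothetical caller sketch (it assumes the JobParams class above is on the classpath; the parameter name and default are made up):

import io.vertx.core.MultiMap;
import java.util.Optional;
import org.sfs.jobs.JobParams;

final class OptionalParamCallerSketch {
    public static void main(String[] args) {
        MultiMap params = MultiMap.caseInsensitiveMultiMap().set("limit", "250");

        // Wrap the nullable result so the absent case gets an explicit default
        int limit = Optional.ofNullable(JobParams.getFirstOptionalParam(params, "limit"))
                .map(Integer::parseInt)
                .orElse(10_000);
        System.out.println(limit); // 250
    }
}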
From source file:org.sfs.nodes.compute.account.GetAccount.java
License:Apache License
@Override
public void handle(final SfsRequest httpServerRequest) {
    aVoid().flatMap(new Authenticate(httpServerRequest))
            .flatMap(new ValidateActionAuthenticated(httpServerRequest))
            .map(aVoid -> fromSfsRequest(httpServerRequest))
            .map(new ValidateAccountPath())
            .map(objectPath -> objectPath.accountPath().get())
            .flatMap(new LoadAccount(httpServerRequest.vertxContext()))
            .map(new ValidatePersistentAccountExists())
            .flatMap(new ListContainers(httpServerRequest))
            .flatMap(containerList -> {
                HttpServerResponse httpServerResponse = httpServerRequest.response();
                // The response format is negotiated from the "format" query parameter and the Accept header
                MultiMap headerParams = httpServerRequest.headers();
                MultiMap queryParams = httpServerRequest.params();
                String format = queryParams.get(FORMAT);
                String accept = headerParams.get(ACCEPT);
                Account account = containerList.getAccount();
                Metadata metadata = account.getMetadata();
                for (String key : metadata.keySet()) {
                    SortedSet<String> values = metadata.get(key);
                    if (values != null && !values.isEmpty()) {
                        httpServerResponse.putHeader(format("%s%s", X_ADD_ACCOUNT_META_PREFIX, key), values);
                    }
                }
                httpServerResponse.putHeader(X_ACCOUNT_OBJECT_COUNT, valueOf(containerList.getObjectCount()));
                httpServerResponse.putHeader(X_ACCOUNT_CONTAINER_COUNT, valueOf(containerList.getContainerCount()));
                httpServerResponse.putHeader(X_ACCOUNT_BYTES_USED,
                        BigDecimal.valueOf(containerList.getBytesUsed()).setScale(0, ROUND_HALF_UP).toString());
                MediaType parsedAccept = null;
                if (!isNullOrEmpty(accept)) {
                    parsedAccept = parse(accept);
                }
                if (equalsIgnoreCase("xml", format)) {
                    parsedAccept = APPLICATION_XML_UTF_8;
                } else if (equalsIgnoreCase("json", format)) {
                    parsedAccept = JSON_UTF_8;
                }
                httpServerResponse.setStatusCode(HTTP_OK);
                if (parsedAccept != null && JSON_UTF_8.is(parsedAccept)) {
                    String charset = UTF_8.toString();
                    JsonArray array = new JsonArray();
                    for (SparseContainer container : ordered(containerList.getContainers())) {
                        array.add(new JsonObject().put("name", container.getContainerName())
                                .put("count", container.getObjectCount())
                                .put("bytes", BigDecimal.valueOf(container.getByteCount())
                                        .setScale(0, ROUND_HALF_UP).longValue()));
                    }
                    Buffer buffer = buffer(array.encode(), charset);
                    httpServerResponse = httpServerResponse.putHeader(CONTENT_TYPE, JSON_UTF_8.toString());
                    httpServerResponse = httpServerResponse.putHeader(CONTENT_LENGTH, valueOf(buffer.length()));
                    return AsyncIO.append(buffer, httpServerResponse);
                } else if (parsedAccept != null && APPLICATION_XML_UTF_8.is(parsedAccept)) {
                    BufferOutputStream bufferOutputStream = new BufferOutputStream();
                    String charset = UTF_8.toString();
                    XMLStreamWriter writer = null;
                    try {
                        writer = newFactory().createXMLStreamWriter(bufferOutputStream, charset);
                        writer.writeStartDocument(charset, "1.0");
                        writer.writeStartElement("account");
                        writer.writeAttribute("name", fromPaths(account.getId()).accountName().get());
                        for (SparseContainer container : ordered(containerList.getContainers())) {
                            writer.writeStartElement("container");
                            writer.writeStartElement("name");
                            writer.writeCharacters(container.getContainerName());
                            writer.writeEndElement();
                            writer.writeStartElement("count");
                            writer.writeCharacters(valueOf(container.getObjectCount()));
                            writer.writeEndElement();
                            writer.writeStartElement("bytes");
                            writer.writeCharacters(BigDecimal.valueOf(container.getByteCount())
                                    .setScale(0, ROUND_HALF_UP).toString());
                            writer.writeEndElement();
                            writer.writeEndElement();
                        }
                        writer.writeEndElement();
                        writer.writeEndDocument();
                    } catch (XMLStreamException e) {
                        throw new RuntimeException(e);
                    } finally {
                        try {
                            if (writer != null) {
                                writer.close();
                            }
                        } catch (XMLStreamException e) {
                            LOGGER.warn(e.getLocalizedMessage(), e);
                        }
                    }
                    Buffer buffer = bufferOutputStream.toBuffer();
                    httpServerResponse = httpServerResponse.putHeader(CONTENT_TYPE, APPLICATION_XML_UTF_8.toString());
                    httpServerResponse = httpServerResponse.putHeader(CONTENT_LENGTH, valueOf(buffer.length()));
                    return AsyncIO.append(buffer, httpServerResponse);
                } else {
                    String charset = UTF_8.toString();
                    Buffer buffer = buffer();
                    for (SparseContainer container : ordered(containerList.getContainers())) {
                        buffer.appendString(container.getContainerName(), charset);
                        buffer.appendString("\n", charset);
                    }
                    httpServerResponse = httpServerResponse.putHeader(CONTENT_TYPE, PLAIN_TEXT_UTF_8.toString());
                    httpServerResponse = httpServerResponse.putHeader(CONTENT_LENGTH, valueOf(buffer.length()));
                    return AsyncIO.append(buffer, httpServerResponse);
                }
            }).single().subscribe(new ConnectionCloseTerminus<Void>(httpServerRequest) {
                @Override
                public void onNext(Void aVoid) {
                }
            });
}
From source file:org.sfs.nodes.compute.container.DeleteContainer.java
License:Apache License
@Override
public void handle(final SfsRequest httpServerRequest) {
    VertxContext<Server> vertxContext = httpServerRequest.vertxContext();
    aVoid().flatMap(new Authenticate(httpServerRequest))
            .flatMap(new ValidateActionAuthenticated(httpServerRequest))
            .doOnNext(aVoid -> {
                // Reject the unsupported bulk-delete query parameter up front
                MultiMap params = httpServerRequest.params();
                String bulkDelete = params.get("bulk-delete");
                if ("1".equals(bulkDelete)) {
                    throw new HttpRequestValidationException(HTTP_CONFLICT,
                            new JsonObject().put("message", "bulk-delete not supported"));
                }
            })
            .map(aVoid -> fromSfsRequest(httpServerRequest))
            .map(new ValidateContainerPath())
            .flatMap(new LoadAccountAndContainer(vertxContext))
            .flatMap(new ValidateActionContainerDelete(httpServerRequest))
            .flatMap(new ValidateContainerIsEmpty(vertxContext))
            .flatMap(new RemoveObjectIndex(vertxContext))
            .flatMap(new RemoveContainerKeys(vertxContext))
            .flatMap(new RemoveContainer(httpServerRequest.vertxContext()))
            .map(new ValidateOptimisticContainerLock())
            .map(new ToVoid<>())
            .single()
            .subscribe(new ConnectionCloseTerminus<Void>(httpServerRequest) {
                @Override
                public void onNext(Void aVoid) {
                }
            });
}
From source file:org.sfs.nodes.compute.container.DeleteOrDestroyContainer.java
License:Apache License
@Override
public void handle(SfsRequest sfsRequest) {
    MultiMap queryParams = sfsRequest.params();
    // The destroy query parameter selects the destructive variant of container deletion
    if (Objects.equals("1", queryParams.get(SfsHttpQueryParams.DESTROY))) {
        DestroyContainer destroyContainer = new DestroyContainer();
        destroyContainer.handle(sfsRequest);
    } else {
        DeleteContainer deleteContainer = new DeleteContainer();
        deleteContainer.handle(sfsRequest);
    }
}
From source file:org.sfs.nodes.compute.container.ExportContainer.java
License:Apache License
@Override
public void handle(final SfsRequest httpServerRequest) {
    VertxContext<Server> vertxContext = httpServerRequest.vertxContext();
    SfsVertx sfsVertx = vertxContext.vertx();
    Context context = sfsVertx.getOrCreateContext();
    aVoid().flatMap(new Authenticate(httpServerRequest))
            .flatMap(new ValidateActionAdmin(httpServerRequest))
            .map(aVoid -> httpServerRequest)
            .map(new ValidateHeaderExists(X_SFS_DEST_DIRECTORY))
            .map(new ValidateHeaderIsBoolean(X_SFS_COMPRESS))
            .map(new ValidateHeaderIsBase64Encoded(X_SFS_SECRET))
            .map(new ValidateHeaderBetweenLong(X_SFS_KEEP_ALIVE_TIMEOUT, 10000, 300000))
            .map(aVoid -> fromSfsRequest(httpServerRequest))
            .map(new ValidateContainerPath())
            .flatMap(new LoadAccountAndContainer(vertxContext))
            .flatMap(persistentContainer -> {
                // Export options are read from request headers after they have been validated above
                MultiMap headers = httpServerRequest.headers();
                String destDirectory = headers.get(X_SFS_DEST_DIRECTORY);
                boolean compress = "true".equalsIgnoreCase(headers.get(X_SFS_COMPRESS));
                byte[] secret = headers.contains(X_SFS_SECRET) ? base64().decode(headers.get(X_SFS_SECRET)) : null;
                return aVoid().flatMap(aVoid -> {
                    ObservableFuture<Boolean> handler = RxHelper.observableFuture();
                    vertxContext.vertx().fileSystem().exists(destDirectory, handler.toHandler());
                    return handler.map(destDirectoryExists -> {
                        if (!TRUE.equals(destDirectoryExists)) {
                            JsonObject jsonObject = new JsonObject().put("message",
                                    format("%s does not exist", destDirectory));
                            throw new HttpRequestValidationException(HTTP_BAD_REQUEST, jsonObject);
                        } else {
                            return (Void) null;
                        }
                    });
                }).flatMap(oVoid -> {
                    ObservableFuture<List<String>> handler = RxHelper.observableFuture();
                    vertxContext.vertx().fileSystem().readDir(destDirectory, handler.toHandler());
                    return handler.map(listing -> {
                        if (listing.size() > 0) {
                            JsonObject jsonObject = new JsonObject().put("message",
                                    format("%s is not empty", destDirectory));
                            throw new HttpRequestValidationException(HTTP_BAD_REQUEST, jsonObject);
                        } else {
                            return (Void) null;
                        }
                    });
                }).flatMap(aVoid -> {
                    LOGGER.info("Exporting container " + persistentContainer.getId() + " to " + destDirectory);
                    JournalFile dumpFile = new JournalFile(get(destDirectory).resolve(DUMP_FILE_NAME));
                    return dumpFile.open(vertxContext.vertx())
                            .flatMap(aVoid1 -> dumpFile.enableWrites(vertxContext.vertx()))
                            .map(aVoid1 -> dumpFile);
                }).flatMap(dumpFile -> {
                    httpServerRequest.startProxyKeepAlive();
                    Elasticsearch elasticsearch = vertxContext.verticle().elasticsearch();
                    String containerId = persistentContainer.getId();
                    String objectIndex = elasticsearch.objectIndex(persistentContainer.getName());
                    long now = System.currentTimeMillis() - VerifyRepairAllContainerObjects.CONSISTENCY_THRESHOLD;
                    Calendar consistencyThreshold = Calendar.getInstance();
                    consistencyThreshold.setTimeInMillis(now);
                    TermQueryBuilder containerIdQuery = termQuery("container_id", containerId);
                    ScanAndScrollStreamProducer producer = new ScanAndScrollStreamProducer(vertxContext, containerIdQuery)
                            .setIndeces(objectIndex)
                            .setTypes(elasticsearch.defaultType())
                            .setReturnVersion(true);
                    DumpFileWriter fileWriter = new DumpFileWriter(vertxContext, persistentContainer, dumpFile);
                    if (compress) {
                        fileWriter.enableDataCompression();
                    }
                    if (secret != null) {
                        fileWriter.enableDataEncryption(secret);
                    }
                    return pump(producer, fileWriter).map(aVoid -> dumpFile);
                }).flatMap(journalFile -> journalFile.disableWrites(vertxContext.vertx())
                        .map(aVoid -> journalFile))
                        .flatMap(journalFile -> journalFile.force(vertxContext.vertx(), true)
                                .map(aVoid -> journalFile))
                        .flatMap(journalFile -> journalFile.close(vertxContext.vertx())
                                .map(aVoid -> journalFile))
                        .flatMap(journalFile -> RxHelper.executeBlocking(context, sfsVertx.getBackgroundPool(), () -> {
                            try {
                                write(get(destDirectory).resolve(".successful"), new byte[0], CREATE_NEW, WRITE);
                                return (Void) null;
                            } catch (IOException e) {
                                throw new RuntimeException(e);
                            }
                        }))
                        .doOnNext(aVoid -> LOGGER.info("Done exporting container "
                                + persistentContainer.getId() + " to " + destDirectory))
                        .onErrorResumeNext(throwable -> {
                            LOGGER.info("Failed exporting container "
                                    + persistentContainer.getId() + " to " + destDirectory, throwable);
                            return Observable.error(throwable);
                        });
            }).map(new ToVoid<>()).single().subscribe(new ConnectionCloseTerminus<Void>(httpServerRequest) {
                @Override
                public void onNext(Void aVoid) {
                    JsonObject jsonResponse = new JsonObject();
                    jsonResponse.put("code", HTTP_OK);
                    HttpServerResponse httpResponse = httpServerRequest.response();
                    httpResponse.write(jsonResponse.encode(), UTF_8.toString()).write(DELIMITER_BUFFER);
                }
            });
}
From source file:org.sfs.nodes.compute.container.GetContainer.java
License:Apache License
@Override
public void handle(final SfsRequest httpServerRequest) {
    VertxContext<Server> vertxContext = httpServerRequest.vertxContext();
    aVoid().flatMap(new Authenticate(httpServerRequest))
            .flatMap(new ValidateActionAuthenticated(httpServerRequest))
            .map(aVoid -> fromSfsRequest(httpServerRequest))
            .map(new ValidateContainerPath())
            .flatMap(new LoadAccountAndContainer(vertxContext))
            .flatMap(new ValidateActionContainerListObjects(httpServerRequest))
            .flatMap(persistentContainer -> {
                HttpServerResponse httpServerResponse = httpServerRequest.response();
                // Content negotiation: the "format" query parameter overrides the Accept header
                MultiMap queryParams = httpServerRequest.params();
                MultiMap headerParams = httpServerRequest.headers();
                String format = queryParams.get(FORMAT);
                String accept = headerParams.get(ACCEPT);
                MediaType parsedAccept = null;
                if (equalsIgnoreCase("xml", format)) {
                    parsedAccept = APPLICATION_XML_UTF_8;
                } else if (equalsIgnoreCase("json", format)) {
                    parsedAccept = JSON_UTF_8;
                }
                if (parsedAccept == null) {
                    if (!isNullOrEmpty(accept)) {
                        parsedAccept = parse(accept);
                    }
                }
                if (parsedAccept == null
                        || (!PLAIN_TEXT_UTF_8.is(parsedAccept)
                                && !APPLICATION_XML_UTF_8.is(parsedAccept)
                                && !JSON_UTF_8.equals(parsedAccept))) {
                    parsedAccept = PLAIN_TEXT_UTF_8;
                }
                Observable<Optional<ContainerStats>> oContainerStats;
                boolean hasPrefix = !Strings.isNullOrEmpty(queryParams.get(SfsHttpQueryParams.PREFIX));
                if (hasPrefix) {
                    oContainerStats = just(persistentContainer)
                            .flatMap(new LoadContainerStats(httpServerRequest.vertxContext()))
                            .map(Optional::of);
                } else {
                    oContainerStats = Defer.just(Optional.<ContainerStats>absent());
                }
                Observable<ObjectList> oObjectListing = just(persistentContainer)
                        .flatMap(new ListObjects(httpServerRequest));
                MediaType finalParsedAccept = parsedAccept;
                return combineSinglesDelayError(oContainerStats, oObjectListing, (containerStats, objectList) -> {
                    if (containerStats.isPresent()) {
                        Metadata metadata = persistentContainer.getMetadata();
                        for (String key : metadata.keySet()) {
                            SortedSet<String> values = metadata.get(key);
                            if (values != null && !values.isEmpty()) {
                                httpServerResponse.putHeader(
                                        format("%s%s", X_ADD_CONTAINER_META_PREFIX, key), values);
                            }
                        }
                        httpServerResponse.putHeader(X_CONTAINER_OBJECT_COUNT,
                                valueOf(containerStats.get().getObjectCount()));
                        httpServerResponse.putHeader(X_CONTAINER_BYTES_USED,
                                BigDecimal.valueOf(containerStats.get().getBytesUsed())
                                        .setScale(0, ROUND_HALF_UP).toString());
                    }
                    BufferOutputStream bufferOutputStream = new BufferOutputStream();
                    if (JSON_UTF_8.is(finalParsedAccept)) {
                        try {
                            JsonFactory jsonFactory = vertxContext.verticle().jsonFactory();
                            JsonGenerator jg = jsonFactory.createGenerator(bufferOutputStream, UTF8);
                            jg.writeStartArray();
                            for (ListedObject listedObject : ordered(objectList.getObjects())) {
                                jg.writeStartObject();
                                jg.writeStringField("hash", base16().lowerCase().encode(listedObject.getEtag()));
                                jg.writeStringField("last_modified", toDateTimeString(listedObject.getLastModified()));
                                jg.writeNumberField("bytes", listedObject.getLength());
                                jg.writeStringField("content_type", listedObject.getContentType());
                                jg.writeStringField("name", listedObject.getName());
                                jg.writeEndObject();
                            }
                            jg.writeEndArray();
                            jg.close();
                        } catch (IOException e) {
                            throw new RuntimeException(e);
                        }
                    } else if (APPLICATION_XML_UTF_8.is(finalParsedAccept)) {
                        String charset = UTF_8.toString();
                        XMLStreamWriter writer = null;
                        try {
                            writer = newFactory().createXMLStreamWriter(bufferOutputStream, charset);
                            writer.writeStartDocument(charset, "1.0");
                            writer.writeStartElement("container");
                            writer.writeAttribute("name", fromPaths(persistentContainer.getId()).containerName().get());
                            for (ListedObject listedObject : ordered(objectList.getObjects())) {
                                writer.writeStartElement("object");
                                writer.writeStartElement("name");
                                writer.writeCharacters(listedObject.getName());
                                writer.writeEndElement();
                                writer.writeStartElement("hash");
                                writer.writeCharacters(base16().lowerCase().encode(listedObject.getEtag()));
                                writer.writeEndElement();
                                writer.writeStartElement("bytes");
                                writer.writeCharacters(valueOf(listedObject.getLength()));
                                writer.writeEndElement();
                                writer.writeStartElement("content_type");
                                writer.writeCharacters(listedObject.getContentType());
                                writer.writeEndElement();
                                writer.writeStartElement("last_modified");
                                writer.writeCharacters(toDateTimeString(listedObject.getLastModified()));
                                writer.writeEndElement();
                                writer.writeEndElement();
                            }
                            writer.writeEndElement();
                            writer.writeEndDocument();
                        } catch (XMLStreamException e) {
                            throw new RuntimeException(e);
                        } finally {
                            try {
                                if (writer != null) {
                                    writer.close();
                                }
                            } catch (XMLStreamException e) {
                                LOGGER.warn(e.getLocalizedMessage(), e);
                            }
                        }
                    } else {
                        String charset = UTF_8.toString();
                        try (OutputStreamWriter outputStreamWriter = new OutputStreamWriter(bufferOutputStream, charset)) {
                            for (ListedObject listedObject : ordered(objectList.getObjects())) {
                                outputStreamWriter.write(listedObject.getName());
                                outputStreamWriter.write("\n");
                            }
                        } catch (IOException e) {
                            throw new RuntimeException(e);
                        }
                    }
                    objectList.clear();
                    return bufferOutputStream;
                }).flatMap(bufferOutputStream -> {
                    Buffer buffer = bufferOutputStream.toBuffer();
                    httpServerResponse.putHeader(HttpHeaders.CONTENT_TYPE, finalParsedAccept.toString())
                            .putHeader(HttpHeaders.CONTENT_LENGTH, String.valueOf(buffer.length()));
                    return AsyncIO.append(buffer, httpServerRequest.response());
                });
            }).single().subscribe(new ConnectionCloseTerminus<Void>(httpServerRequest) {
                @Override
                public void onNext(Void aVoid) {
                }
            });
}