List of usage examples for com.fasterxml.jackson.dataformat.xml.XmlMapper (constructor)
public XmlMapper()
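Before the real-world examples below, here is a minimal, self-contained sketch of the no-argument constructor in use. The Point class and its fields are invented purely for illustration; the point is that XmlMapper extends ObjectMapper, so the familiar writeValueAsString/readValue API applies with XML as the wire format.

import com.fasterxml.jackson.dataformat.xml.XmlMapper;

public class XmlMapperDemo {
    // A hypothetical POJO used only for this illustration.
    public static class Point {
        public int x;
        public int y;
    }

    public static void main(String[] args) throws Exception {
        XmlMapper xmlMapper = new XmlMapper();

        Point p = new Point();
        p.x = 1;
        p.y = 2;

        // Serialize: the root element name defaults to the simple class name.
        String xml = xmlMapper.writeValueAsString(p);
        System.out.println(xml); // <Point><x>1</x><y>2</y></Point>

        // Deserialize back from XML.
        Point restored = xmlMapper.readValue(xml, Point.class);
        System.out.println(restored.x + "," + restored.y); // 1,2
    }
}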
From source file: org.apache.nifi.processors.ParseCSV.ParseCSV.java
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final Charset charset = Charset.defaultCharset();
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final Map<String, String> attributes = new LinkedHashMap<>();
    final String format = context.getProperty(FORMAT).getValue();
    final boolean create_attributes = Boolean.parseBoolean(context.getProperty(CREATE_ATTRIBUTES).getValue());
    final char delimiter = context.getProperty(DELIMITER).getValue().charAt(0);
    final boolean with_header = Boolean.parseBoolean(context.getProperty(WITH_HEADER).getValue());
    final String output_format = context.getProperty(OUTPUT_FORMAT).getValue();
    final String custom_header = context.getProperty(CUSTOM_HEADER).getValue();
    final String column_mask = context.getProperty(COLUMN_MASK).getValue();
    final String column_encrypt = context.getProperty(COLUMN_ENCRYPT).getValue();
    final String column_tokenize = context.getProperty(COLUMN_TOKENIZE).getValue();
    final String tokenize_unique_identifier = context.getProperty(TOKENIZE_UNQIUE_IDENTIFIER).getValue();
    final String tokenized_ouput = context.getProperty(TOKENIZED_OUTPUT).getValue();
    final String encryptionKey = "Bar12345Bar12345";
    final String static_schema = context.getProperty(STATIC_SCHEMA).getValue();

    // holder for the tokenized flow file created inside the callback
    final org.apache.nifi.util.ObjectHolder<FlowFile> holder = new org.apache.nifi.util.ObjectHolder<>(null);

    flowFile = session.write(flowFile, new StreamCallback() {
        @Override
        public void process(InputStream inputStream, OutputStream outputStream) throws IOException {
            CSVFormat csvFormat = buildFormat(format, delimiter, with_header, custom_header);
            CSVParser csvParser = new CSVParser(new InputStreamReader(inputStream, charset), csvFormat);
            CSVPrinter csvPrinter = new CSVPrinter(new OutputStreamWriter(outputStream, charset), csvFormat);
            String[] headerArray;
            ArrayList<String> columnMaskList = new ArrayList<>();
            ArrayList<String> columnEncryptList = new ArrayList<>();
            ArrayList<String> columnTokenizeList = new ArrayList<>();
            List<String> maskValueHolder = new LinkedList<>();
            FlowFile tokenized = session.create();

            // print the header if needed
            if (custom_header != null && output_format.equals("CSV") && static_schema == null) {
                csvPrinter.printRecord(custom_header);
                headerArray = custom_header.split(",");
            } else if (static_schema != null && custom_header == null) {
                csvPrinter.printRecord(static_schema.replace("\"", ""));
                headerArray = static_schema.split(",");
            } else {
                headerArray = csvParser.getHeaderMap().keySet().toArray(new String[0]);
                csvPrinter.printRecord(headerArray);
            }

            if (column_mask != null) {
                columnMaskList = new ArrayList<>(Arrays.asList(column_mask.replace("\"", "").split(",")));
            }
            if (column_encrypt != null) {
                columnEncryptList = new ArrayList<>(Arrays.asList(column_encrypt.split(",")));
            }
            if (column_tokenize != null) {
                columnTokenizeList = new ArrayList<>(Arrays.asList(column_tokenize.split(",")));
            }

            // loop through the records and print each one
            for (final CSVRecord record : csvParser) {
                // generate per-record attributes if required
                if (create_attributes) {
                    for (int i = 0; i < headerArray.length; i++) {
                        attributes.put(headerArray[i] + "." + record.getRecordNumber(), record.get(i));
                    }
                }
                // check masked columns
                if (column_mask != null || column_encrypt != null) {
                    // match the user-requested mask columns against the header
                    for (int i = 0; i < headerArray.length; i++) {
                        if (columnMaskList.contains(headerArray[i])) {
                            // set mask
                            maskValueHolder.add(mask(record.get(i)));
                            // construct a tokenization row for an external DB store
                            if (columnTokenizeList.contains(headerArray[i])) {
                                final String tokenizedRow = tokenizationOut(tokenized_ouput, headerArray[i],
                                        tokenize_unique_identifier, mask(record.get(i)), record.get(i),
                                        Long.toString(record.getRecordNumber()));
                                tokenized = session.append(tokenized, new OutputStreamCallback() {
                                    @Override
                                    public void process(OutputStream outputStream) throws IOException {
                                        outputStream.write(tokenizedRow.getBytes());
                                    }
                                });
                            }
                        } else if (columnEncryptList.contains(headerArray[i])) {
                            // encrypt
                            maskValueHolder.add(new String(Encrypt(record.get(i), encryptionKey), "UTF-8"));
                        } else {
                            // no mask
                            maskValueHolder.add(record.get(i));
                        }
                    }
                    csvPrinter.printRecord(maskValueHolder);
                    // clear the mask column holder
                    maskValueHolder.clear();
                } else {
                    // no masking or encryption required; print the record
                    switch (output_format) {
                    case "CSV":
                        List<String> items = Arrays.asList(static_schema.split(","));
                        String lastColumn = items.get(items.size() - 1);
                        String row = "";
                        for (String item : items) {
                            if (!item.equals(lastColumn)) {
                                row += record.get(item) + ",";
                            } else {
                                row += record.get(item);
                            }
                        }
                        csvPrinter.printRecord(row.replaceAll("^\"|\"$", ""));
                        break;
                    case "JSON":
                        String json = new ObjectMapper().writer().withDefaultPrettyPrinter()
                                .writeValueAsString(record.toMap()) + "\n";
                        if (json.length() > 0) {
                            outputStream.write(json.getBytes());
                        }
                        break;
                    case "XML":
                        outputStream.write(new XmlMapper().writeValueAsString(record.toMap()).getBytes());
                        break;
                    }
                }
            }
            csvPrinter.flush();
            csvPrinter.close();
            holder.set(tokenized);
        }
    });

    flowFile = session.putAllAttributes(flowFile, attributes);
    session.transfer(flowFile, RELATIONSHIP_SUCCESS);
    session.transfer(holder.get(), RELATIONSHIP_TOKENIZED);
}
From source file: org.openmhealth.shim.healthvault.HealthvaultShim.java
@Override
public ShimDataResponse getData(final ShimDataRequest shimDataRequest) throws ShimException {
    final HealthVaultDataType healthVaultDataType;
    try {
        healthVaultDataType = HealthVaultDataType
                .valueOf(shimDataRequest.getDataTypeKey().trim().toUpperCase());
    } catch (NullPointerException | IllegalArgumentException e) {
        throw new ShimException("Null or Invalid data type parameter: " + shimDataRequest.getDataTypeKey()
                + " in shimDataRequest, cannot retrieve data.");
    }

    final DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy-MM-dd'T'hh:mm:ss");

    // Set up default date parameters.
    DateTime today = new DateTime();

    DateTime startDate = shimDataRequest.getStartDate() == null ? today.minusDays(1)
            : shimDataRequest.getStartDate();
    String dateStart = startDate.toString(formatter);

    DateTime endDate = shimDataRequest.getEndDate() == null ? today.plusDays(1)
            : shimDataRequest.getEndDate();
    String dateEnd = endDate.toString(formatter);

    long numToReturn = shimDataRequest.getNumToReturn() == null || shimDataRequest.getNumToReturn() <= 0 ? 100
            : shimDataRequest.getNumToReturn();

    Request request = new Request();
    request.setMethodName("GetThings");
    request.setInfo("<info>" + "<group max=\"" + numToReturn + "\">" + "<filter>" + "<type-id>"
            + healthVaultDataType.getDataTypeId() + "</type-id>" + "<eff-date-min>" + dateStart
            + "</eff-date-min>" + "<eff-date-max>" + dateEnd + "</eff-date-max>" + "</filter>" + "<format>"
            + "<section>core</section>" + "<xml/>" + "</format>" + "</group>" + "</info>");

    RequestTemplate template = new RequestTemplate(connection);
    return template.makeRequest(shimDataRequest.getAccessParameters(), request,
            new Marshaller<ShimDataResponse>() {
                public ShimDataResponse marshal(InputStream istream) throws Exception {
                    /*
                     * XML document mappings to JSON don't respect repeatable tags: they
                     * don't get properly serialized into collections. Thus, we pick up
                     * the 'things' via the 'group' root tag and create a new JSON
                     * document out of each 'thing' node.
                     */
                    XmlMapper xmlMapper = new XmlMapper();
                    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
                    DocumentBuilder builder = factory.newDocumentBuilder();
                    Document doc = builder.parse(istream);
                    NodeList nodeList = doc.getElementsByTagName("thing");

                    // Collect a JsonNode from each 'thing' XML node.
                    List<JsonNode> thingList = new ArrayList<>();
                    for (int i = 0; i < nodeList.getLength(); i++) {
                        Node node = nodeList.item(i);
                        Document thingDoc = builder.newDocument();
                        Node newNode = thingDoc.importNode(node, true);
                        thingDoc.appendChild(newNode);
                        thingList.add(xmlMapper.readTree(convertDocumentToString(thingDoc)));
                    }

                    // Rebuild the JSON document structure to pass to the deserializer.
                    String thingsJson = "{\"things\":[";
                    String thingsContent = "";
                    for (JsonNode thingNode : thingList) {
                        thingsContent += thingNode.toString() + ",";
                    }
                    thingsContent = "".equals(thingsContent) ? thingsContent
                            : thingsContent.substring(0, thingsContent.length() - 1);
                    thingsJson += thingsContent;
                    thingsJson += "]}";

                    // Return the raw rebuilt 'things' or a normalized JSON document.
                    ObjectMapper objectMapper = new ObjectMapper();
                    if (shimDataRequest.getNormalize()) {
                        SimpleModule module = new SimpleModule();
                        module.addDeserializer(ShimDataResponse.class, healthVaultDataType.getNormalizer());
                        objectMapper.registerModule(module);
                        return objectMapper.readValue(thingsJson, ShimDataResponse.class);
                    } else {
                        return ShimDataResponse.result(HealthvaultShim.SHIM_KEY,
                                objectMapper.readTree(thingsJson));
                    }
                }
            });
}
From source file: org.gaul.s3proxy.S3ProxyHandler.java
private static void handleSetContainerAcl(HttpServletRequest request, HttpServletResponse response,
        InputStream is, BlobStore blobStore, String containerName) throws IOException, S3Exception {
    ContainerAccess access;

    String cannedAcl = request.getHeader("x-amz-acl");
    if (cannedAcl == null || "private".equalsIgnoreCase(cannedAcl)) {
        access = ContainerAccess.PRIVATE;
    } else if ("public-read".equalsIgnoreCase(cannedAcl)) {
        access = ContainerAccess.PUBLIC_READ;
    } else if (CANNED_ACLS.contains(cannedAcl)) {
        throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
    } else {
        response.sendError(HttpServletResponse.SC_BAD_REQUEST);
        return;
    }

    PushbackInputStream pis = new PushbackInputStream(is);
    int ch = pis.read();
    if (ch != -1) {
        pis.unread(ch);
        AccessControlPolicy policy = new XmlMapper().readValue(pis, AccessControlPolicy.class);
        String accessString = mapXmlAclsToCannedPolicy(policy);
        if (accessString.equals("private")) {
            access = ContainerAccess.PRIVATE;
        } else if (accessString.equals("public-read")) {
            access = ContainerAccess.PUBLIC_READ;
        } else {
            throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
        }
    }

    blobStore.setContainerAccess(containerName, access);
}
From source file: org.gaul.s3proxy.S3ProxyHandler.java
private static void handleSetBlobAcl(HttpServletRequest request, HttpServletResponse response,
        InputStream is, BlobStore blobStore, String containerName, String blobName)
        throws IOException, S3Exception {
    BlobAccess access;

    String cannedAcl = request.getHeader("x-amz-acl");
    if (cannedAcl == null || "private".equalsIgnoreCase(cannedAcl)) {
        access = BlobAccess.PRIVATE;
    } else if ("public-read".equalsIgnoreCase(cannedAcl)) {
        access = BlobAccess.PUBLIC_READ;
    } else if (CANNED_ACLS.contains(cannedAcl)) {
        throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
    } else {
        response.sendError(HttpServletResponse.SC_BAD_REQUEST);
        return;
    }

    PushbackInputStream pis = new PushbackInputStream(is);
    int ch = pis.read();
    if (ch != -1) {
        pis.unread(ch);
        AccessControlPolicy policy = new XmlMapper().readValue(pis, AccessControlPolicy.class);
        String accessString = mapXmlAclsToCannedPolicy(policy);
        if (accessString.equals("private")) {
            access = BlobAccess.PRIVATE;
        } else if (accessString.equals("public-read")) {
            access = BlobAccess.PUBLIC_READ;
        } else {
            throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
        }
    }

    blobStore.setBlobAccess(containerName, blobName, access);
}
From source file: org.gaul.s3proxy.S3ProxyHandler.java
private static void handleContainerCreate(HttpServletRequest request, HttpServletResponse response,
        InputStream is, BlobStore blobStore, String containerName) throws IOException, S3Exception {
    if (containerName.isEmpty()) {
        throw new S3Exception(S3ErrorCode.METHOD_NOT_ALLOWED);
    }
    if (containerName.length() < 3 || containerName.length() > 255 || containerName.startsWith(".")
            || containerName.endsWith(".") || validateIpAddress(containerName)
            || !VALID_BUCKET_FIRST_CHAR.matches(containerName.charAt(0))
            || !VALID_BUCKET.matchesAllOf(containerName)) {
        throw new S3Exception(S3ErrorCode.INVALID_BUCKET_NAME);
    }

    String contentLengthString = request.getHeader(HttpHeaders.CONTENT_LENGTH);
    if (contentLengthString != null) {
        long contentLength;
        try {
            contentLength = Long.parseLong(contentLengthString);
        } catch (NumberFormatException nfe) {
            throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT, nfe);
        }
        if (contentLength < 0) {
            throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT);
        }
    }

    String locationString;
    try (PushbackInputStream pis = new PushbackInputStream(is)) {
        int ch = pis.read();
        if (ch == -1) {
            // handle empty bodies
            locationString = null;
        } else {
            pis.unread(ch);
            CreateBucketRequest cbr = new XmlMapper().readValue(pis, CreateBucketRequest.class);
            locationString = cbr.locationConstraint;
        }
    }

    Location location = null;
    if (locationString != null) {
        for (Location loc : blobStore.listAssignableLocations()) {
            if (loc.getId().equalsIgnoreCase(locationString)) {
                location = loc;
                break;
            }
        }
        if (location == null) {
            throw new S3Exception(S3ErrorCode.INVALID_LOCATION_CONSTRAINT);
        }
    }
    logger.debug("Creating bucket with location: {}", location);

    CreateContainerOptions options = new CreateContainerOptions();
    String acl = request.getHeader("x-amz-acl");
    if ("public-read".equalsIgnoreCase(acl)) {
        options.publicRead();
    }

    boolean created;
    try {
        created = blobStore.createContainerInLocation(location, containerName, options);
    } catch (AuthorizationException ae) {
        throw new S3Exception(S3ErrorCode.BUCKET_ALREADY_EXISTS, ae);
    }
    if (!created) {
        throw new S3Exception(S3ErrorCode.BUCKET_ALREADY_OWNED_BY_YOU,
                S3ErrorCode.BUCKET_ALREADY_OWNED_BY_YOU.getMessage(), null,
                ImmutableMap.of("BucketName", containerName));
    }

    response.addHeader(HttpHeaders.LOCATION, "/" + containerName);
}
From source file: org.gaul.s3proxy.S3ProxyHandler.java
private void handleMultiBlobRemove(HttpServletResponse response, InputStream is, BlobStore blobStore,
        String containerName) throws IOException {
    DeleteMultipleObjectsRequest dmor = new XmlMapper().readValue(is, DeleteMultipleObjectsRequest.class);
    Collection<String> blobNames = new ArrayList<>();
    for (DeleteMultipleObjectsRequest.S3Object s3Object : dmor.objects) {
        blobNames.add(s3Object.key);
    }

    blobStore.removeBlobs(containerName, blobNames);

    try (Writer writer = response.getWriter()) {
        XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(writer);
        xml.writeStartDocument();
        xml.writeStartElement("DeleteResult");
        xml.writeDefaultNamespace(AWS_XMLNS);
        if (!dmor.quiet) {
            for (String blobName : blobNames) {
                xml.writeStartElement("Deleted");
                writeSimpleElement(xml, "Key", blobName);
                xml.writeEndElement();
            }
        }
        // TODO: emit error stanza
        xml.writeEndElement();
        xml.flush();
    } catch (XMLStreamException xse) {
        throw new IOException(xse);
    }
}
From source file: org.gaul.s3proxy.S3ProxyHandler.java
private void handleCompleteMultipartUpload(HttpServletResponse response, InputStream is, BlobStore blobStore,
        String containerName, String blobName, String uploadId) throws IOException, S3Exception {
    MultipartUpload mpu;
    if (Quirks.MULTIPART_REQUIRES_STUB.contains(getBlobStoreType(blobStore))) {
        Blob stubBlob = blobStore.getBlob(containerName, uploadId);
        BlobAccess access = blobStore.getBlobAccess(containerName, uploadId);
        mpu = MultipartUpload.create(containerName, blobName, uploadId, stubBlob.getMetadata(),
                new PutOptions().setBlobAccess(access));
    } else {
        mpu = MultipartUpload.create(containerName, blobName, uploadId, new MutableBlobMetadataImpl(),
                new PutOptions());
    }

    // List parts to get part sizes and to map multiple Azure parts
    // into single parts.
    ImmutableMap.Builder<Integer, MultipartPart> builder = ImmutableMap.builder();
    for (MultipartPart part : blobStore.listMultipartUpload(mpu)) {
        builder.put(part.partNumber(), part);
    }
    ImmutableMap<Integer, MultipartPart> partsByListing = builder.build();

    List<MultipartPart> parts = new ArrayList<>();
    String blobStoreType = getBlobStoreType(blobStore);
    if (blobStoreType.equals("azureblob")) {
        // TODO: how to sanity check parts?
        for (MultipartPart part : blobStore.listMultipartUpload(mpu)) {
            parts.add(part);
        }
    } else {
        CompleteMultipartUploadRequest cmu = new XmlMapper().readValue(is,
                CompleteMultipartUploadRequest.class);
        // use TreeMap to allow runt last part
        SortedMap<Integer, String> requestParts = new TreeMap<>();
        if (cmu.parts != null) {
            for (CompleteMultipartUploadRequest.Part part : cmu.parts) {
                requestParts.put(part.partNumber, part.eTag);
            }
        }
        for (Iterator<Map.Entry<Integer, String>> it = requestParts.entrySet().iterator(); it.hasNext();) {
            Map.Entry<Integer, String> entry = it.next();
            MultipartPart part = partsByListing.get(entry.getKey());
            if (part == null) {
                throw new S3Exception(S3ErrorCode.INVALID_PART);
            }
            long partSize = part.partSize();
            if (partSize < blobStore.getMinimumMultipartPartSize() && partSize != -1 && it.hasNext()) {
                throw new S3Exception(S3ErrorCode.ENTITY_TOO_SMALL);
            }
            if (part.partETag() != null
                    && !equalsIgnoringSurroundingQuotes(part.partETag(), entry.getValue())) {
                throw new S3Exception(S3ErrorCode.INVALID_PART);
            }
            parts.add(MultipartPart.create(entry.getKey(), partSize, part.partETag(), part.lastModified()));
        }
    }

    if (parts.isEmpty()) {
        // Amazon requires at least one part
        throw new S3Exception(S3ErrorCode.MALFORMED_X_M_L);
    }

    String eTag = blobStore.completeMultipartUpload(mpu, parts);

    if (Quirks.MULTIPART_REQUIRES_STUB.contains(getBlobStoreType(blobStore))) {
        blobStore.removeBlob(containerName, uploadId);
    }

    try (Writer writer = response.getWriter()) {
        XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(writer);
        xml.writeStartDocument();
        xml.writeStartElement("CompleteMultipartUploadResult");
        xml.writeDefaultNamespace(AWS_XMLNS);
        // TODO: bogus value
        writeSimpleElement(xml, "Location", "http://Example-Bucket.s3.amazonaws.com/" + blobName);
        writeSimpleElement(xml, "Bucket", containerName);
        writeSimpleElement(xml, "Key", blobName);
        if (eTag != null) {
            writeSimpleElement(xml, "ETag", maybeQuoteETag(eTag));
        }
        xml.writeEndElement();
        xml.flush();
    } catch (XMLStreamException xse) {
        throw new IOException(xse);
    }
}