Example usage for com.fasterxml.jackson.databind.node JsonNodeFactory JsonNodeFactory

Introduction

On this page you can find example usages of the com.fasterxml.jackson.databind.node.JsonNodeFactory constructor JsonNodeFactory(boolean).

Prototype

public JsonNodeFactory(boolean bigDecimalExact) 
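
The boolean flag controls how BigDecimal values are turned into nodes: with true the factory keeps the value exactly as supplied (scale and trailing zeros preserved), with false it may normalize the value by stripping trailing zeros. A minimal sketch of the difference, assuming Jackson 2.x on the classpath (the parameter name bigDecimalExact follows current Jackson sources):

import java.math.BigDecimal;

import com.fasterxml.jackson.databind.node.JsonNodeFactory;

public class JsonNodeFactoryFlagDemo {
    public static void main(String[] args) {
        // true: BigDecimal values are stored exactly as given (scale and trailing zeros kept).
        JsonNodeFactory exactFactory = new JsonNodeFactory(true);
        // false: BigDecimal values may be normalized (trailing zeros stripped).
        JsonNodeFactory plainFactory = new JsonNodeFactory(false);

        BigDecimal value = new BigDecimal("1.00");

        System.out.println(exactFactory.numberNode(value).decimalValue()); // 1.00
        System.out.println(plainFactory.numberNode(value).decimalValue()); // 1
    }
}

Code that does not need exact decimal handling typically reuses the shared JsonNodeFactory.instance rather than constructing a new factory, as the examples below do only to pick the flag explicitly.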

Usage

From source file:com.redhat.lightblue.metadata.rdbms.impl.RDBMSPropertyParserImplTest.java

@Test
public void testParse_Delete() throws IOException {
    MetadataParser<JsonNode> p = new JSONMetadataParser(new Extensions<JsonNode>(), new DefaultTypes(),
            new JsonNodeFactory(true));

    JsonNode node = loadJsonNode("RdbmsMetadataTest-delete.json");

    RDBMS rdbms = new RDBMSPropertyParserImpl<JsonNode>().parse(RDBMSPropertyParserImpl.NAME, p,
            node.get("rdbms"));

    assertEquals(DialectOperators.ORACLE, rdbms.getDialect());
    assertNotNull(rdbms.getDelete());
    assertNull(rdbms.getFetch());
    assertNull(rdbms.getInsert());
    assertNull(rdbms.getSave());
    assertNull(rdbms.getUpdate());
}

From source file:uk.ac.ebi.eva.server.ws.ArchiveWSServer.java

@RequestMapping(value = "/studies/stats", method = RequestMethod.GET)
public QueryResponse getStudiesStats(@RequestParam(name = "species", required = false) List<String> species,
        @RequestParam(name = "type", required = false) List<String> types,
        @RequestParam(name = "structural", defaultValue = "false") boolean structural) {
    initializeQueryOptions();
    if (species != null && !species.isEmpty()) {
        queryOptions.put("species", species);
    }
    if (types != null && !types.isEmpty()) {
        queryOptions.put("type", types);
    }

    QueryResult<Map.Entry<String, Integer>> resultSpecies, resultTypes;

    if (structural) {
        resultSpecies = archiveDgvaDbAdaptor.countStudiesPerSpecies(queryOptions);
        resultTypes = archiveDgvaDbAdaptor.countStudiesPerType(queryOptions);
    } else {
        resultSpecies = archiveEvaproDbAdaptor.countStudiesPerSpecies(queryOptions);
        resultTypes = archiveEvaproDbAdaptor.countStudiesPerType(queryOptions);
    }

    QueryResult combinedQueryResult = new QueryResult();
    combinedQueryResult.setDbTime(resultSpecies.getDbTime() + resultTypes.getDbTime());

    JsonNodeFactory factory = new JsonNodeFactory(true);
    ObjectNode root = factory.objectNode();
    combinedQueryResult.addResult(root);
    combinedQueryResult.setNumTotalResults(combinedQueryResult.getNumResults());

    // Species
    ObjectNode speciesNode = factory.objectNode();
    for (Map.Entry<String, Integer> speciesCount : resultSpecies.getResult()) {
        speciesNode.put(speciesCount.getKey(), speciesCount.getValue());
    }
    root.put("species", speciesNode);

    // Types
    ObjectNode typesNode = factory.objectNode();
    for (Map.Entry<String, Integer> typesCount : resultTypes.getResult()) {
        typesNode.put(typesCount.getKey(), typesCount.getValue());
    }
    root.put("type", typesNode);

    return setQueryResponse(combinedQueryResult);
}

From source file:com.redhat.lightblue.metadata.rdbms.impl.RDBMSPropertyParserImplTest.java

@Test
public void testParse_Fetch() throws IOException {
    MetadataParser<JsonNode> p = new JSONMetadataParser(new Extensions<JsonNode>(), new DefaultTypes(),
            new JsonNodeFactory(true));

    JsonNode node = loadJsonNode("RdbmsMetadataTest-fetch.json");

    RDBMS rdbms = new RDBMSPropertyParserImpl<JsonNode>().parse(RDBMSPropertyParserImpl.NAME, p,
            node.get("rdbms"));

    assertEquals(DialectOperators.ORACLE, rdbms.getDialect());
    assertNull(rdbms.getDelete());
    assertNotNull(rdbms.getFetch());
    assertNull(rdbms.getInsert());
    assertNull(rdbms.getSave());
    assertNull(rdbms.getUpdate());
}

From source file:com.cloudera.cdk.morphline.json.JsonMorphlineTest.java

@Test
public void testComplexDocuments() throws Exception {
    morphline = createMorphline("test-morphlines/extractJsonPaths");
    File file = new File(RESOURCES_DIR + "/test-documents/complex.json");
    InputStream in = new FileInputStream(file);
    Record record = new Record();
    record.put(Fields.ATTACHMENT_BODY, in);

    startSession();
    assertEquals(1, collector.getNumStartEvents());
    assertTrue(morphline.process(record));

    assertEquals(1, collector.getRecords().size());
    JsonNode rootNode = (JsonNode) new ObjectMapper().reader(JsonNode.class).readValues(file).next();
    JsonNodeFactory factory = new JsonNodeFactory(false);

    assertEquals(Arrays.asList(10), collector.getFirstRecord().get("/docId"));
    assertEquals(Arrays.asList(rootNode.get("links")), collector.getFirstRecord().get("/links"));

    assertEquals(Arrays.asList(factory.arrayNode()), collector.getFirstRecord().get("/links/backward"));
    assertEquals(factory.arrayNode(), rootNode.get("links").get("backward"));

    List expected = Arrays.asList(factory.arrayNode().add(20).add(40).add(60).add(true).add(false).add(32767)
            .add(2147483647).add(9223372036854775807L).add(1.23).add(1.7976931348623157E308));
    assertEquals(expected, collector.getFirstRecord().get("/links/forward"));
    assertEquals(expected, collector.getFirstRecord().get("/links/forward/[]"));
    assertEquals(expected, collector.getFirstRecord().get("/links/forward[]"));
    assertEquals(Arrays.asList(rootNode.get("name")), collector.getFirstRecord().get("/name"));
    assertEquals(Arrays.asList("en-us", "en", "en-gb"),
            collector.getFirstRecord().get("/name/[]/language/[]/code"));
    assertEquals(Arrays.asList("en-us", "en", "en-gb"),
            collector.getFirstRecord().get("/name[]/language[]/code"));
    assertEquals(Arrays.asList("us", "gb"), collector.getFirstRecord().get("/name/[]/language/[]/country"));
    assertEquals(Arrays.asList("us", "gb"), collector.getFirstRecord().get("/name[]/language[]/country"));
    assertEquals(Arrays.asList(), collector.getFirstRecord().get("/unknownField"));
    assertEquals(Arrays.asList(true), collector.getFirstRecord().get("/links/bool"));
    assertEquals(Arrays.asList(32767), collector.getFirstRecord().get("/links/short"));
    assertEquals(Arrays.asList(2147483647), collector.getFirstRecord().get("/links/int"));
    assertEquals(Arrays.asList(9223372036854775807L), collector.getFirstRecord().get("/links/long"));
    assertEquals(Arrays.asList(1.7976931348623157E308), collector.getFirstRecord().get("/links/double"));

    in.close();
}

From source file:com.redhat.lightblue.metadata.rdbms.impl.RDBMSPropertyParserImplTest.java

@Test
public void testParse_Insert() throws IOException {
    MetadataParser<JsonNode> p = new JSONMetadataParser(new Extensions<JsonNode>(), new DefaultTypes(),
            new JsonNodeFactory(true));

    JsonNode node = loadJsonNode("RdbmsMetadataTest-insert.json");

    RDBMS rdbms = new RDBMSPropertyParserImpl<JsonNode>().parse(RDBMSPropertyParserImpl.NAME, p,
            node.get("rdbms"));

    assertEquals(DialectOperators.ORACLE, rdbms.getDialect());
    assertNull(rdbms.getDelete());
    assertNull(rdbms.getFetch());
    assertNotNull(rdbms.getInsert());
    assertNull(rdbms.getSave());
    assertNull(rdbms.getUpdate());
}

From source file:uk.ac.ucl.excites.sapelli.collector.SapColCmdLn.java

/**
 * @param sapFile
 * @param project
 * @throws IOException
 * @see https://github.com/ExCiteS/geokey-sapelli
 */
static public void printProjectInfoForGeoKey(File sapFile, Project project) throws IOException {
    GeoKeyFormDescriber gkFormDescriber = new GeoKeyFormDescriber();

    // Create the node factory that gives us nodes.
    JsonNodeFactory factory = new JsonNodeFactory(false);

    // create a json factory to write the treenode as json. for the example
    // we just write to console
    JsonFactory jsonFactory = new JsonFactory();
    JsonGenerator generator = jsonFactory.createGenerator(System.out);
    ObjectMapper mapper = new ObjectMapper();

    // the root node
    ObjectNode projectJSON = factory.objectNode();

    // describe project:
    projectJSON.put("name", project.getName());
    projectJSON.put("variant", project.getVariant());
    projectJSON.put("version", project.getVersion());
    projectJSON.put("display_name", project.toString(false));
    projectJSON.put("sapelli_id", project.getID());
    projectJSON.put("sapelli_fingerprint", project.getFingerPrint());
    projectJSON.put("sapelli_model_id", project.getModel().id);
    projectJSON.put("installation_path", fsp.getProjectInstallationFolder(project, false).getAbsolutePath());
    ArrayNode formsJSON = factory.arrayNode();
    for (Form frm : project.getForms()) {
        ObjectNode formNode = gkFormDescriber.getFormJSON(frm);
        if (formNode != null)
            formsJSON.add(formNode);
    }
    projectJSON.set("forms", formsJSON);

    // Serialise:
    mapper.writeTree(generator, projectJSON);
}

From source file:com.redhat.lightblue.metadata.rdbms.impl.RDBMSPropertyParserImplTest.java

@Test
public void testParse_Save() throws IOException {
    MetadataParser<JsonNode> p = new JSONMetadataParser(new Extensions<JsonNode>(), new DefaultTypes(),
            new JsonNodeFactory(true));

    JsonNode node = loadJsonNode("RdbmsMetadataTest-save.json");

    RDBMS rdbms = new RDBMSPropertyParserImpl<JsonNode>().parse(RDBMSPropertyParserImpl.NAME, p,
            node.get("rdbms"));

    assertEquals(DialectOperators.ORACLE, rdbms.getDialect());
    assertNull(rdbms.getDelete());
    assertNull(rdbms.getFetch());
    assertNull(rdbms.getInsert());
    assertNotNull(rdbms.getSave());
    assertNull(rdbms.getUpdate());
}

From source file:squash.booking.lambdas.core.BackupManager.java

@Override
public final ImmutablePair<List<Booking>, List<BookingRule>> backupAllBookingsAndBookingRules()
        throws Exception {

    if (!initialised) {
        throw new IllegalStateException("The backup manager has not been initialised");
    }

    // Encode bookings and booking rules as JSON
    JsonNodeFactory factory = new JsonNodeFactory(false);
    // Create a json factory to write the treenode as json.
    JsonFactory jsonFactory = new JsonFactory();
    ObjectNode rootNode = factory.objectNode();

    ArrayNode bookingsNode = rootNode.putArray("bookings");
    List<Booking> bookings = bookingManager.getAllBookings(false);
    for (Booking booking : bookings) {
        bookingsNode.add((JsonNode) (mapper.valueToTree(booking)));
    }

    ArrayNode bookingRulesNode = rootNode.putArray("bookingRules");
    List<BookingRule> bookingRules = ruleManager.getRules(false);
    for (BookingRule bookingRule : bookingRules) {
        bookingRulesNode.add((JsonNode) (mapper.valueToTree(bookingRule)));
    }

    // Add this, as will be needed for restore in most common case.
    rootNode.put("clearBeforeRestore", true);

    ByteArrayOutputStream backupDataStream = new ByteArrayOutputStream();
    PrintStream printStream = new PrintStream(backupDataStream);
    try (JsonGenerator generator = jsonFactory.createGenerator(printStream)) {
        mapper.writeTree(generator, rootNode);
    }
    String backupString = backupDataStream.toString(StandardCharsets.UTF_8.name());

    logger.log("Backing up all bookings and booking rules to S3 bucket");
    IS3TransferManager transferManager = getS3TransferManager();
    byte[] backupAsBytes = backupString.getBytes(StandardCharsets.UTF_8);
    ByteArrayInputStream backupAsStream = new ByteArrayInputStream(backupAsBytes);
    ObjectMetadata metadata = new ObjectMetadata();
    metadata.setContentLength(backupAsBytes.length);
    PutObjectRequest putObjectRequest = new PutObjectRequest(databaseBackupBucketName,
            "AllBookingsAndBookingRules", backupAsStream, metadata);
    TransferUtils.waitForS3Transfer(transferManager.upload(putObjectRequest), logger);
    logger.log("Backed up all bookings and booking rules to S3 bucket: " + backupString);

    // Backup to the SNS topic
    logger.log("Backing up all bookings and booking rules to SNS topic: " + adminSnsTopicArn);
    getSNSClient().publish(adminSnsTopicArn, backupString, "Sqawsh all-bookings and booking rules backup");

    return new ImmutablePair<>(bookings, bookingRules);
}

From source file:com.redhat.lightblue.metadata.mongo.MongoMetadataTest.java

@Test
public void createMdWithAndRefTest() throws Exception {
    Extensions<JsonNode> extensions = new Extensions<>();
    extensions.addDefaultExtensions();
    extensions.registerDataStoreParser("mongo", new MongoDataStoreParser<JsonNode>());
    JSONMetadataParser parser = new JSONMetadataParser(extensions, new DefaultTypes(),
            new JsonNodeFactory(true));

    // get JsonNode representing metadata
    JsonNode jsonMetadata = AbstractJsonNodeTest
            .loadJsonNode(getClass().getSimpleName() + "-qps-andquery.json");

    // parser into EntityMetadata
    EntityMetadata e = parser.parseEntityMetadata(jsonMetadata);

    // persist
    md.createNewMetadata(e);
    EntityMetadata g = md.getEntityMetadata("test", "1.0.0");
    // No exception=OK
}

From source file:com.redhat.lightblue.metadata.rdbms.impl.RDBMSPropertyParserImplTest.java

@Test
public void testParse_Update() throws IOException {
    MetadataParser<JsonNode> p = new JSONMetadataParser(new Extensions<JsonNode>(), new DefaultTypes(),
            new JsonNodeFactory(true));

    JsonNode node = loadJsonNode("RdbmsMetadataTest-update.json");

    RDBMS rdbms = new RDBMSPropertyParserImpl<JsonNode>().parse(RDBMSPropertyParserImpl.NAME, p,
            node.get("rdbms"));

    assertEquals(DialectOperators.ORACLE, rdbms.getDialect());
    assertNull(rdbms.getDelete());
    assertNull(rdbms.getFetch());
    assertNull(rdbms.getInsert());
    assertNull(rdbms.getSave());
    assertNotNull(rdbms.getUpdate());
}