Example usage for com.fasterxml.jackson.core JsonFactory JsonFactory

List of usage examples for com.fasterxml.jackson.core JsonFactory JsonFactory

Introduction

On this page you can find example usage for com.fasterxml.jackson.core JsonFactory JsonFactory.

Prototype

public JsonFactory() 

Document

Default constructor used to create factory instances.
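
Before the project examples below, here is a minimal, self-contained sketch of the default constructor in use (the sample JSON string is invented for illustration): a single JsonFactory is typically created once and then reused to create parsers and generators.

import java.io.IOException;
import java.io.StringWriter;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;

public class JsonFactoryDemo {
    public static void main(String[] args) throws IOException {
        // Default constructor: a plain streaming factory with no ObjectCodec attached.
        JsonFactory factory = new JsonFactory();

        // Streaming parse of a small document.
        try (JsonParser parser = factory.createParser("{\"name\":\"demo\",\"count\":3}")) {
            while (parser.nextToken() != null) {
                if (parser.getCurrentToken() == JsonToken.FIELD_NAME) {
                    System.out.println("field: " + parser.getCurrentName());
                }
            }
        }

        // Streaming generation into a StringWriter.
        StringWriter sw = new StringWriter();
        try (JsonGenerator gen = factory.createGenerator(sw)) {
            gen.writeStartObject();
            gen.writeStringField("name", "demo");
            gen.writeNumberField("count", 3);
            gen.writeEndObject();
        }
        System.out.println(sw); // {"name":"demo","count":3}
    }
}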

Usage

From source file:com.tage.calcite.adapter.druid.DruidConnectionImpl.java

/** Parses the output of a {@code topN} query, sending the results to a
 * {@link Sink}. */
private void parse(com.tage.calcite.adapter.druid.QueryType queryType, InputStream in, Sink sink,
        List<String> fieldNames, List<Primitive> fieldTypes, Page page) {
    final JsonFactory factory = new JsonFactory();
    final Row.RowBuilder rowBuilder = Row.newBuilder(fieldNames.size());

    if (CalcitePrepareImpl.DEBUG) {
        try {
            final byte[] bytes = AvaticaUtils.readFullyToBytes(in);
            System.out.println("Response: " + new String(bytes));
            in = new ByteArrayInputStream(bytes);
        } catch (IOException e) {
            throw Throwables.propagate(e);
        }
    }

    try (final JsonParser parser = factory.createParser(in)) {
        switch (queryType) {
        case TOP_N:
            if (parser.nextToken() == JsonToken.START_ARRAY && parser.nextToken() == JsonToken.START_OBJECT) {
                expectScalarField(parser, "timestamp");
                if (parser.nextToken() == JsonToken.FIELD_NAME && parser.getCurrentName().equals("result")
                        && parser.nextToken() == JsonToken.START_ARRAY) {
                    while (parser.nextToken() == JsonToken.START_OBJECT) {
                        // loop until token equal to "}"
                        parseFields(fieldNames, fieldTypes, rowBuilder, parser);
                        sink.send(rowBuilder.build());
                        rowBuilder.reset();
                    }
                }
            }
            break;

        case SELECT:
            if (parser.nextToken() == JsonToken.START_ARRAY && parser.nextToken() == JsonToken.START_OBJECT) {
                page.pagingIdentifier = null;
                page.offset = -1;
                expectScalarField(parser, "timestamp");
                if (parser.nextToken() == JsonToken.FIELD_NAME && parser.getCurrentName().equals("result")
                        && parser.nextToken() == JsonToken.START_OBJECT) {
                    if (parser.nextToken() == JsonToken.FIELD_NAME
                            && parser.getCurrentName().equals("pagingIdentifiers")
                            && parser.nextToken() == JsonToken.START_OBJECT) {
                        switch (parser.nextToken()) {
                        case FIELD_NAME:
                            page.pagingIdentifier = parser.getCurrentName();
                            if (parser.nextToken() == JsonToken.VALUE_NUMBER_INT) {
                                page.offset = parser.getIntValue();
                            }
                            expect(parser, JsonToken.END_OBJECT);
                            break;
                        case END_OBJECT:
                        }
                    }
                    if (parser.nextToken() == JsonToken.FIELD_NAME && parser.getCurrentName().equals("events")
                            && parser.nextToken() == JsonToken.START_ARRAY) {
                        while (parser.nextToken() == JsonToken.START_OBJECT) {
                            expectScalarField(parser, "segmentId");
                            expectScalarField(parser, "offset");
                            if (parser.nextToken() == JsonToken.FIELD_NAME
                                    && parser.getCurrentName().equals("event")
                                    && parser.nextToken() == JsonToken.START_OBJECT) {
                                parseFields(fieldNames, fieldTypes, rowBuilder, parser);
                                sink.send(rowBuilder.build());
                                rowBuilder.reset();
                            }
                            expect(parser, JsonToken.END_OBJECT);
                        }
                        parser.nextToken();
                    }
                }
            }
            break;

        case GROUP_BY:
            if (parser.nextToken() == JsonToken.START_ARRAY) {
                while (parser.nextToken() == JsonToken.START_OBJECT) {
                    expectScalarField(parser, "version");
                    expectScalarField(parser, "timestamp");
                    if (parser.nextToken() == JsonToken.FIELD_NAME && parser.getCurrentName().equals("event")
                            && parser.nextToken() == JsonToken.START_OBJECT) {
                        parseFields(fieldNames, fieldTypes, rowBuilder, parser);
                        sink.send(rowBuilder.build());
                        rowBuilder.reset();
                    }
                    expect(parser, JsonToken.END_OBJECT);
                }
            }
        }
    } catch (IOException | InterruptedException e) {
        throw Throwables.propagate(e);
    }
}
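
The long example above relies on Jackson's streaming token API throughout. A stripped-down, standalone version of the same pattern is sketched below; the response body is invented to resemble a Druid topN result, and this is not Calcite code.

import java.io.IOException;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;

public class TopNStreamingSketch {
    public static void main(String[] args) throws IOException {
        // Invented response body, shaped like the topN output parsed above.
        String json = "[{\"timestamp\":\"2015-09-12T00:00:00.000Z\","
                + "\"result\":[{\"page\":\"a\",\"count\":10},{\"page\":\"b\",\"count\":7}]}]";
        JsonFactory factory = new JsonFactory();
        try (JsonParser parser = factory.createParser(json)) {
            while (parser.nextToken() != null) {
                // On a value token, getCurrentName() is the name of the enclosing field.
                if (parser.getCurrentToken() == JsonToken.VALUE_NUMBER_INT) {
                    System.out.println(parser.getCurrentName() + " = " + parser.getIntValue());
                    // prints: count = 10, then count = 7
                }
            }
        }
    }
}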

From source file:com.ryan.ryanreader.jsonwrap.JsonValue.java

/**
 * Begins parsing a JSON stream into a tree structure. The JsonValue object
 * created contains the value at the root of the tree.
 *
 * This constructor will block until the first JSON token is received. To
 * continue building the tree, the "build" method (inherited from
 * JsonBuffered) must be called in another thread.
 * 
 * @param source
 *         The source of incoming JSON data.
 * @throws java.io.IOException
 */
public JsonValue(final byte[] source) throws IOException {
    this(new JsonFactory().createParser(source));
}

From source file:org.cloudcoder.dataanalysis.ProgsnapExport.java

private String encodeLine(String tagname, Object value) throws IOException {
    StringWriter sw = new StringWriter();
    JsonFactory factory = new JsonFactory();
    JsonGenerator jg = factory.createGenerator(sw);
    jg.writeStartObject();
    jg.writeStringField("tag", tagname);
    jg.writeFieldName("value");
    writeJsonFieldValue(jg, value);
    jg.writeEndObject();
    jg.close();
    return sw.toString();
}
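
For illustration only, a hypothetical call (tag name and value invented; the exact value rendering depends on the project-specific writeJsonFieldValue) would yield a one-line JSON object:

// Hypothetical usage; assumes writeJsonFieldValue writes the Long as a JSON number.
String line = encodeLine("ts", 1438700000000L);
// line would be: {"tag":"ts","value":1438700000000}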

From source file:org.messic.server.api.tagwizard.discogs.DiscogsTAGWizardPlugin.java

@Override
public List<Album> getAlbumInfo(Album albumHelpInfo, File[] files) {
    if (albumHelpInfo == null || (albumHelpInfo.name == null && albumHelpInfo.author == null)
            || ((albumHelpInfo.name != null && albumHelpInfo.name.length() <= 0)
                    && (albumHelpInfo.author != null && albumHelpInfo.author.length() <= 0))) {
        return new ArrayList<Album>();
    }

    String baseURL = "http://api.discogs.com/database/search?type=release";

    try {
        if (albumHelpInfo.name != null) {
            baseURL = baseURL + "&release_title=" + URLEncoder.encode(albumHelpInfo.name, "UTF-8") + "";
        }
        if (albumHelpInfo.author != null) {
            baseURL = baseURL + "&artist=" + URLEncoder.encode(albumHelpInfo.author, "UTF-8") + "";
        }

        URL url = new URL(baseURL);
        Proxy proxy = getProxy();
        URLConnection uc = (proxy != null ? url.openConnection(proxy) : url.openConnection());
        uc.setRequestProperty("User-Agent", "Messic/1.0 +http://spheras.github.io/messic/");

        ArrayList<Album> result = new ArrayList<Album>();

        JsonFactory jsonFactory = new JsonFactory(); // or, for data binding, a MappingJsonFactory
        JsonParser jParser = jsonFactory.createParser(uc.getInputStream());
        while (jParser.nextToken() != null) {
            String fieldname = jParser.getCurrentName();
            if ("id".equals(fieldname)) {
                jParser.nextToken();
                String id = jParser.getText();
                // one second per petition allowed by discogs
                Thread.sleep(1000);

                Album album = getAlbum(id);

                result.add(album);
            }

        }
        return result;
    } catch (Exception e) {
        log.error("failed!", e);
    }

    return null;
}

From source file:com.ning.metrics.action.hdfs.reader.HdfsListing.java

@SuppressWarnings({ "unchecked", "unused" })
public void toJson(final OutputStream out, final boolean pretty) throws IOException {
    final String parentPath = getParentPath() == null ? "" : getParentPath();

    final JsonGenerator generator = new JsonFactory().createJsonGenerator(out);
    generator.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
    if (pretty) {
        generator.setPrettyPrinter(new DefaultPrettyPrinter());
    }

    generator.writeStartObject();
    generator.writeObjectField(JSON_LISTING_PATH, getPath());
    generator.writeObjectField(JSON_LISTING_PARENT_PATH, parentPath);
    generator.writeArrayFieldStart(JSON_LISTING_ENTRIES);
    // Important: need to flush before appending pre-serialized events
    generator.flush();

    for (HdfsEntry entry : getEntries()) {
        entry.toJson(generator);
    }
    generator.writeEndArray();

    generator.writeEndObject();
    generator.close();
}

From source file:innovimax.quixproc.datamodel.generator.json.AJSONGenerator.java

public static void main(String[] args)
        throws JsonParseException, IOException, InstantiationException, IllegalAccessException {

    /*
     * final byte[][] patterns = { // empty object is allowed
     *     "\"A\":1".getBytes(), // first used only once
     *     ",\"A\":1".getBytes()
     * };
     * BoxedArray baA = new BoxedArray(patterns, 1, 2);
     * for (int i = 0; i < Integer.MAX_VALUE; i++) {
     *     baA.nextUnique();
     * }
     *
     * System.out.println(display(patterns[1]));
     */
    JsonFactory f = new JsonFactory();
    f.disable(Feature.ALLOW_COMMENTS);
    f.disable(Feature.ALLOW_SINGLE_QUOTES);
    // AGenerator generator = instance(ATreeGenerator.Type.HIGH_DENSITY);
    AGenerator generator = instance(FileExtension.JSON, TreeType.HIGH_NODE_DEPTH, SpecialType.STANDARD);

    InputStream is = generator.getInputStream(50, Unit.MBYTE, Variation.NO_VARIATION);
    if (false) {
        int c;
        while ((c = is.read()) != -1) {
            System.out.println(display((byte) (c & 0xFF)));
        }
    } else {
        JsonParser p = f.createParser(is);
        p.enable(Feature.STRICT_DUPLICATE_DETECTION);

        while (p.nextToken() != JsonToken.END_OBJECT) {
            //
        }
    }
}
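
For readers unfamiliar with the parser features toggled above, the following small standalone sketch (input string invented, not part of the generator code) shows the effect of STRICT_DUPLICATE_DETECTION:

import java.io.IOException;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.JsonParser;

public class DuplicateDetectionSketch {
    public static void main(String[] args) throws IOException {
        JsonFactory f = new JsonFactory();
        JsonParser p = f.createParser("{\"a\":1,\"a\":2}");
        p.enable(JsonParser.Feature.STRICT_DUPLICATE_DETECTION);
        try {
            while (p.nextToken() != null) {
                // just drain the tokens
            }
            System.out.println("no duplicates detected");
        } catch (JsonParseException e) {
            // With the feature enabled, the second "a" triggers a parse error.
            System.out.println("duplicate field rejected: " + e.getOriginalMessage());
        }
    }
}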

From source file:com.quinsoft.zeidon.standardoe.ActivateOisFromJsonStream.java

public List<View> read() {
    try {
        JsonFactory jsonFactory = new JsonFactory();
        jp = jsonFactory.createParser(stream);
        jp.configure(JsonParser.Feature.AUTO_CLOSE_SOURCE, false);

        // Read the START_OBJECT
        JsonToken token = jp.nextToken();
        if (token != JsonToken.START_OBJECT)
            throw new ZeidonException("OI JSON stream doesn't start with object.");

        token = jp.nextToken();
        if (token != JsonToken.FIELD_NAME)
            throw new ZeidonException("OI JSON missing OI field name.");

        String fieldName = jp.getCurrentName();
        if (fieldName.equals(".meta")) {
            readFileMeta();

            JsonReader reader = getReaderForVersion();
            reader.process();
        } else {
            if (StringUtils.equalsIgnoreCase(fieldName, "version")) {
                token = jp.nextToken(); // Move to value.
                version = jp.getValueAsString();
                token = jp.nextToken(); // Move to next field name.
                assert token == JsonToken.FIELD_NAME;
                fieldName = jp.getCurrentName();
            } else if (StringUtils.isBlank(options.getVersion())) {
                throw new ZeidonException("First field must be version");
            }

            totalRootCount = null;
            if (StringUtils.equalsIgnoreCase(fieldName, "totalRootCount")) {
                token = jp.nextToken(); // Move to value.
                totalRootCount = jp.getValueAsInt();
                token = jp.nextToken(); // Move to next field name.
                assert token == JsonToken.FIELD_NAME;
                fieldName = jp.getCurrentName();
            }

            if (lodDef == null)
                throw new ZeidonException("JSON stream appears to start with the root entity name (%s)"
                        + " but the LodDef has not been specified.", fieldName);

            String rootName = lodDef.getRoot().getName();
            if (!fieldName.equalsIgnoreCase(rootName))
                throw new ZeidonException("The first field in the JSON stream must be the root entity name"
                        + " (%s) or '.meta' but was %s.", rootName, fieldName);

            view = task.activateEmptyObjectInstance(lodDef);
            returnList.add(view);
            if (totalRootCount != null)
                view.setTotalRootCount(totalRootCount);

            JsonReader reader = getSimpleReaderForVersion();
            reader.process();
        }

        jp.close();
    } catch (Exception e) {
        ZeidonException ze = ZeidonException.wrapException(e);
        JsonLocation loc = jp.getCurrentLocation();
        JsonToken token = jp.getCurrentToken();
        ze.appendMessage("Position line=%d col=%d, token=%s", loc.getLineNr(), loc.getColumnNr(),
                token == null ? "No Token" : token.name());
        throw ze;
    }

    return returnList;
}

From source file:de.terrestris.shogun.security.ShogunAuthProcessingFilter.java

/**
 * On successful authentication by an Authentication Manager of Spring Security
 * we intercept with this method and change the response to include the ROLES of
 * the logged-in user.
 * This way we can react on the ROLES and redirect accordingly within the requesting login form (here login.js)
 *
 * @see WebContent/client/login.js
 */
@Override
protected void successfulAuthentication(HttpServletRequest request, HttpServletResponse response,
        Authentication authResult) throws IOException, ServletException {
    SecurityContextHolder.getContext().setAuthentication(authResult);

    SavedRequestAwareAuthenticationSuccessHandler srh = new SavedRequestAwareAuthenticationSuccessHandler();
    this.setAuthenticationSuccessHandler(srh);
    srh.setRedirectStrategy(new RedirectStrategy() {
        @Override
        public void sendRedirect(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse,
                String s) throws IOException {
            //do nothing, no redirect
        }
    });
    super.successfulAuthentication(request, response, authResult);

    // build a comma separated string of the ROLES
    String authorityText = StringUtils.join(authResult.getAuthorities(), ",");

    // write the servlet return object
    HttpServletResponseWrapper responseWrapper = new HttpServletResponseWrapper(response);
    Writer out = responseWrapper.getWriter();
    JsonFactory jsonFactory = new JsonFactory();
    JsonGenerator jsonGenerator = jsonFactory.createJsonGenerator(out);
    jsonGenerator.writeStartObject();
    jsonGenerator.writeBooleanField("success", true);
    jsonGenerator.writeStringField("name", authResult.getName());
    jsonGenerator.writeStringField("role", authorityText);
    jsonGenerator.writeEndObject();
    jsonGenerator.close();
}

From source file:org.kiji.rest.representations.KijiRestEntityId.java

/**
 * Create KijiRestEntityId from a string input, which can be a json string or a raw hbase rowKey.
 * This method is used for entity ids specified from the URL.
 *
 * @param entityId string of the row.
 * @param layout of the table in which the entity id belongs.
 *        If null, then long components may not be recognized.
 * @return a properly constructed KijiRestEntityId.
 * @throws IOException if KijiRestEntityId can not be properly constructed.
 */
public static KijiRestEntityId createFromUrl(final String entityId, final KijiTableLayout layout)
        throws IOException {
    if (entityId.startsWith(HBASE_ROW_KEY_PREFIX) || entityId.startsWith(HBASE_HEX_ROW_KEY_PREFIX)) {
        return new KijiRestEntityId(entityId);
    } else {
        final JsonParser parser = new JsonFactory().createJsonParser(entityId).enable(Feature.ALLOW_COMMENTS)
                .enable(Feature.ALLOW_SINGLE_QUOTES).enable(Feature.ALLOW_UNQUOTED_FIELD_NAMES);
        final JsonNode node = BASIC_MAPPER.readTree(parser);
        return create(node, layout);
    }
}
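
The lenient parser features enabled above are what let URL-friendly entity ids through. A standalone sketch of the same idea follows; the input and output are invented, it is not Kiji code, and it uses the non-deprecated createParser name in place of createJsonParser:

import java.io.IOException;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class LenientEntityIdSketch {
    public static void main(String[] args) throws IOException {
        // Single quotes are not legal JSON, but ALLOW_SINGLE_QUOTES accepts them.
        JsonParser parser = new JsonFactory().createParser("['component1', 2]")
                .enable(JsonParser.Feature.ALLOW_COMMENTS)
                .enable(JsonParser.Feature.ALLOW_SINGLE_QUOTES)
                .enable(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES);
        JsonNode node = new ObjectMapper().readTree(parser);
        System.out.println(node.get(0).asText() + " / " + node.get(1).asInt()); // component1 / 2
    }
}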

From source file:org.h2gis.drivers.geojson.GeoJsonWriteDriver.java

/**
 * Write the spatial table to GeoJSON format.
 *
 * @param progress
 * @throws SQLException
 */
private void writeGeoJson(ProgressVisitor progress) throws SQLException, IOException {
    FileOutputStream fos = null;
    try {
        fos = new FileOutputStream(fileName);
        // Read Geometry Index and type
        final TableLocation parse = TableLocation.parse(tableName,
                JDBCUtilities.isH2DataBase(connection.getMetaData()));
        List<String> spatialFieldNames = SFSUtilities.getGeometryFields(connection, parse);
        if (spatialFieldNames.isEmpty()) {
            throw new SQLException(String.format("The table %s does not contain a geometry field", tableName));
        }

        // Read table content
        Statement st = connection.createStatement();
        try {
            JsonFactory jsonFactory = new JsonFactory();
            JsonGenerator jsonGenerator = jsonFactory.createGenerator(new BufferedOutputStream(fos),
                    JsonEncoding.UTF8);

            // header of the GeoJSON file
            jsonGenerator.writeStartObject();
            jsonGenerator.writeStringField("type", "FeatureCollection");
            writeCRS(jsonGenerator,
                    SFSUtilities.getAuthorityAndSRID(connection, parse, spatialFieldNames.get(0)));
            jsonGenerator.writeArrayFieldStart("features");

            ResultSet rs = st.executeQuery(String.format("select * from `%s`", tableName));

            try {
                ResultSetMetaData resultSetMetaData = rs.getMetaData();
                int geoFieldIndex = JDBCUtilities.getFieldIndex(resultSetMetaData, spatialFieldNames.get(0));

                cacheMetadata(resultSetMetaData);
                while (rs.next()) {
                    writeFeature(jsonGenerator, rs, geoFieldIndex);
                }
                progress.endStep();
                // footer
                jsonGenerator.writeEndArray();
                jsonGenerator.writeEndObject();
                jsonGenerator.flush();
                jsonGenerator.close();

            } finally {
                rs.close();
            }
        } finally {
            st.close();
        }
    } catch (FileNotFoundException ex) {
        throw new SQLException(ex);

    } finally {
        try {
            if (fos != null) {
                fos.close();
            }
        } catch (IOException ex) {
            throw new SQLException(ex);
        }
    }
}