Example usage for com.fasterxml.jackson.core JsonGenerator writeFieldName

Introduction

On this page, you can find example usages of com.fasterxml.jackson.core JsonGenerator writeFieldName.

Prototype

public abstract void writeFieldName(SerializableString name) throws IOException, JsonGenerationException;

Document

Method similar to #writeFieldName(String), the main difference being that it may perform better because some of the processing (such as quoting of certain characters, or encoding into an external encoding if supported by the generator) can be done just once and reused for later calls.
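
Below is a minimal sketch, not taken from the examples on this page, of the reuse this overload enables: the field name is pre-encoded once as a com.fasterxml.jackson.core.io.SerializedString (the standard SerializableString implementation) and then passed to writeFieldName for every record written. The class name, the "id" field, and the loop are illustrative assumptions only.

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.io.SerializedString;

import java.io.IOException;
import java.io.StringWriter;

public class SerializedFieldNameSketch {
    // Encoded (quoted) once; the result can be reused by every writeFieldName call.
    private static final SerializedString ID_FIELD = new SerializedString("id");

    public static void main(String[] args) throws IOException {
        StringWriter writer = new StringWriter();
        JsonFactory factory = new JsonFactory();
        try (JsonGenerator gen = factory.createGenerator(writer)) {
            gen.writeStartArray();
            for (int i = 0; i < 3; i++) {
                gen.writeStartObject();
                gen.writeFieldName(ID_FIELD); // SerializableString overload
                gen.writeNumber(i);
                gen.writeEndObject();
            }
            gen.writeEndArray();
        }
        System.out.println(writer); // prints [{"id":0},{"id":1},{"id":2}]
    }
}

The plain writeFieldName(String) overload would produce the same output; the SerializableString variant simply lets the generator skip re-quoting and re-encoding the name on each call.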

Usage

From source file:com.tage.calcite.adapter.druid.DruidQuery.java

private QuerySpec getQuery(RelDataType rowType, RexNode filter, List<RexNode> projects,
        ImmutableBitSet groupSet, List<AggregateCall> aggCalls, List<String> aggNames) {
    QueryType queryType = QueryType.SELECT;
    final Translator translator = new Translator(druidTable, rowType);
    List<String> fieldNames = rowType.getFieldNames();

    Json jsonFilter = null;
    if (filter != null) {
        jsonFilter = translator.translateFilter(filter);
        translator.metrics.clear();
        translator.dimensions.clear();
    }

    if (projects != null) {
        final ImmutableList.Builder<String> builder = ImmutableList.builder();
        for (RexNode project : projects) {
            builder.add(translator.translate(project));
        }
        fieldNames = builder.build();
    }

    final List<String> dimensions = new ArrayList<>();
    final List<JsonAggregation> aggregations = new ArrayList<>();

    if (groupSet != null) {
        assert aggCalls != null;
        assert aggNames != null;
        assert aggCalls.size() == aggNames.size();
        queryType = QueryType.GROUP_BY;

        final ImmutableList.Builder<String> builder = ImmutableList.builder();
        for (int groupKey : groupSet) {
            final String s = fieldNames.get(groupKey);
            dimensions.add(s);
            builder.add(s);
        }
        for (Pair<AggregateCall, String> agg : Pair.zip(aggCalls, aggNames)) {
            final JsonAggregation jsonAggregation = getJsonAggregation(fieldNames, agg.right, agg.left);
            aggregations.add(jsonAggregation);
            builder.add(jsonAggregation.name);
        }
        fieldNames = builder.build();
    } else {
        assert aggCalls == null;
        assert aggNames == null;
    }

    final StringWriter sw = new StringWriter();
    final JsonFactory factory = new JsonFactory();
    try {
        final JsonGenerator generator = factory.createGenerator(sw);

        switch (queryType) {
        case GROUP_BY:
            generator.writeStartObject();

            if (aggregations.isEmpty()) {
                // Druid requires at least one aggregation, otherwise gives:
                //   Must have at least one AggregatorFactory
                aggregations.add(new JsonAggregation("longSum", "unit_sales", "unit_sales"));
            }

            generator.writeStringField("queryType", "groupBy");
            generator.writeStringField("dataSource", druidTable.dataSource);
            generator.writeStringField("granularity", "all");
            writeField(generator, "dimensions", dimensions);
            writeFieldIf(generator, "limitSpec", null);
            writeFieldIf(generator, "filter", jsonFilter);
            writeField(generator, "aggregations", aggregations);
            writeFieldIf(generator, "postAggregations", null);
            writeField(generator, "intervals", druidTable.intervals);
            writeFieldIf(generator, "having", null);

            generator.writeEndObject();
            break;

        case SELECT:
            generator.writeStartObject();

            generator.writeStringField("queryType", "select");
            generator.writeStringField("dataSource", druidTable.dataSource);
            generator.writeStringField("descending", "false");
            writeField(generator, "intervals", druidTable.intervals);
            writeFieldIf(generator, "filter", jsonFilter);
            writeField(generator, "dimensions", translator.dimensions);
            writeField(generator, "metrics", translator.metrics);
            generator.writeStringField("granularity", "all");

            generator.writeFieldName("pagingSpec");
            generator.writeStartObject();
            final int fetch = CalciteConnectionProperty.DRUID_FETCH.wrap(new Properties()).getInt();
            generator.writeNumberField("threshold", fetch);
            generator.writeEndObject();

            generator.writeEndObject();
            break;

        default:
            throw new AssertionError("unknown query type " + queryType);
        }

        generator.close();
    } catch (IOException e) {
        e.printStackTrace();
    }

    return new QuerySpec(queryType, sw.toString(), fieldNames);
}

From source file:org.apache.flink.runtime.jobgraph.jsonplan.JsonPlanGenerator.java

public static String generatePlan(JobGraph jg) {
    try {
        final StringWriter writer = new StringWriter(1024);

        final JsonFactory factory = new JsonFactory();
        final JsonGenerator gen = factory.createGenerator(writer);

        // start of everything
        gen.writeStartObject();
        gen.writeStringField("jid", jg.getJobID().toString());
        gen.writeStringField("name", jg.getName());
        gen.writeArrayFieldStart("nodes");

        // info per vertex
        for (JobVertex vertex : jg.getVertices()) {

            String operator = vertex.getOperatorName() != null ? vertex.getOperatorName() : NOT_SET;

            String operatorDescr = vertex.getOperatorDescription() != null ? vertex.getOperatorDescription()
                    : NOT_SET;

            String optimizerProps = vertex.getResultOptimizerProperties() != null
                    ? vertex.getResultOptimizerProperties()
                    : EMPTY;

            String description = vertex.getOperatorPrettyName() != null ? vertex.getOperatorPrettyName()
                    : vertex.getName();

            // make sure the encoding is HTML pretty
            description = StringEscapeUtils.escapeHtml4(description);
            description = description.replace("\n", "<br/>");
            description = description.replace("\\", "&#92;");

            operatorDescr = StringEscapeUtils.escapeHtml4(operatorDescr);
            operatorDescr = operatorDescr.replace("\n", "<br/>");

            gen.writeStartObject();

            // write the core properties
            gen.writeStringField("id", vertex.getID().toString());
            gen.writeNumberField("parallelism", vertex.getParallelism());
            gen.writeStringField("operator", operator);
            gen.writeStringField("operator_strategy", operatorDescr);
            gen.writeStringField("description", description);

            if (!vertex.isInputVertex()) {
                // write the input edge properties
                gen.writeArrayFieldStart("inputs");

                List<JobEdge> inputs = vertex.getInputs();
                for (int inputNum = 0; inputNum < inputs.size(); inputNum++) {
                    JobEdge edge = inputs.get(inputNum);
                    if (edge.getSource() == null) {
                        continue;
                    }

                    JobVertex predecessor = edge.getSource().getProducer();

                    String shipStrategy = edge.getShipStrategyName();
                    String preProcessingOperation = edge.getPreProcessingOperationName();
                    String operatorLevelCaching = edge.getOperatorLevelCachingDescription();

                    gen.writeStartObject();
                    gen.writeNumberField("num", inputNum);
                    gen.writeStringField("id", predecessor.getID().toString());

                    if (shipStrategy != null) {
                        gen.writeStringField("ship_strategy", shipStrategy);
                    }
                    if (preProcessingOperation != null) {
                        gen.writeStringField("local_strategy", preProcessingOperation);
                    }
                    if (operatorLevelCaching != null) {
                        gen.writeStringField("caching", operatorLevelCaching);
                    }

                    gen.writeStringField("exchange", edge.getSource().getResultType().name().toLowerCase());

                    gen.writeEndObject();
                }

                gen.writeEndArray();
            }

            // write the optimizer properties
            gen.writeFieldName("optimizer_properties");
            gen.writeRawValue(optimizerProps);

            gen.writeEndObject();
        }

        // end of everything
        gen.writeEndArray();
        gen.writeEndObject();

        gen.close();

        return writer.toString();
    } catch (Exception e) {
        throw new RuntimeException("Failed to generate plan", e);
    }
}

From source file:org.elasticsearch.client.sniff.ElasticsearchNodesSnifferTests.java

private static SniffResponse buildSniffResponse(ElasticsearchNodesSniffer.Scheme scheme) throws IOException {
    int numNodes = RandomNumbers.randomIntBetween(getRandom(), 1, 5);
    List<Node> nodes = new ArrayList<>(numNodes);
    JsonFactory jsonFactory = new JsonFactory();
    StringWriter writer = new StringWriter();
    JsonGenerator generator = jsonFactory.createGenerator(writer);
    generator.writeStartObject();
    if (getRandom().nextBoolean()) {
        generator.writeStringField("cluster_name", "elasticsearch");
    }
    if (getRandom().nextBoolean()) {
        generator.writeObjectFieldStart("bogus_object");
        generator.writeEndObject();
    }
    generator.writeObjectFieldStart("nodes");
    for (int i = 0; i < numNodes; i++) {
        String nodeId = RandomStrings.randomAsciiOfLengthBetween(getRandom(), 5, 10);
        String host = "host" + i;
        int port = RandomNumbers.randomIntBetween(getRandom(), 9200, 9299);
        HttpHost publishHost = new HttpHost(host, port, scheme.toString());
        Set<HttpHost> boundHosts = new HashSet<>();
        boundHosts.add(publishHost);

        if (randomBoolean()) {
            int bound = between(1, 5);
            for (int b = 0; b < bound; b++) {
                boundHosts.add(new HttpHost(host + b, port, scheme.toString()));
            }
        }

        int numAttributes = between(0, 5);
        Map<String, List<String>> attributes = new HashMap<>(numAttributes);
        for (int j = 0; j < numAttributes; j++) {
            int numValues = frequently() ? 1 : between(2, 5);
            List<String> values = new ArrayList<>();
            for (int v = 0; v < numValues; v++) {
                values.add(j + "value" + v);
            }
            attributes.put("attr" + j, values);
        }

        Node node = new Node(publishHost, boundHosts, randomAsciiAlphanumOfLength(5),
                randomAsciiAlphanumOfLength(5),
                new Node.Roles(randomBoolean(), randomBoolean(), randomBoolean()), attributes);

        generator.writeObjectFieldStart(nodeId);
        if (getRandom().nextBoolean()) {
            generator.writeObjectFieldStart("bogus_object");
            generator.writeEndObject();
        }
        if (getRandom().nextBoolean()) {
            generator.writeArrayFieldStart("bogus_array");
            generator.writeStartObject();
            generator.writeEndObject();
            generator.writeEndArray();
        }
        boolean isHttpEnabled = rarely() == false;
        if (isHttpEnabled) {
            nodes.add(node);
            generator.writeObjectFieldStart("http");
            generator.writeArrayFieldStart("bound_address");
            for (HttpHost bound : boundHosts) {
                generator.writeString(bound.toHostString());
            }
            generator.writeEndArray();
            if (getRandom().nextBoolean()) {
                generator.writeObjectFieldStart("bogus_object");
                generator.writeEndObject();
            }
            generator.writeStringField("publish_address", publishHost.toHostString());
            if (getRandom().nextBoolean()) {
                generator.writeNumberField("max_content_length_in_bytes", 104857600);
            }
            generator.writeEndObject();
        }

        List<String> roles = Arrays.asList(new String[] { "master", "data", "ingest" });
        Collections.shuffle(roles, getRandom());
        generator.writeArrayFieldStart("roles");
        for (String role : roles) {
            if ("master".equals(role) && node.getRoles().isMasterEligible()) {
                generator.writeString("master");
            }
            if ("data".equals(role) && node.getRoles().isData()) {
                generator.writeString("data");
            }
            if ("ingest".equals(role) && node.getRoles().isIngest()) {
                generator.writeString("ingest");
            }
        }
        generator.writeEndArray();

        generator.writeFieldName("version");
        generator.writeString(node.getVersion());
        generator.writeFieldName("name");
        generator.writeString(node.getName());

        if (numAttributes > 0) {
            generator.writeObjectFieldStart("attributes");
            for (Map.Entry<String, List<String>> entry : attributes.entrySet()) {
                if (entry.getValue().size() == 1) {
                    generator.writeStringField(entry.getKey(), entry.getValue().get(0));
                } else {
                    for (int v = 0; v < entry.getValue().size(); v++) {
                        generator.writeStringField(entry.getKey() + "." + v, entry.getValue().get(v));
                    }
                }
            }
            generator.writeEndObject();
        }
        generator.writeEndObject();
    }
    generator.writeEndObject();
    generator.writeEndObject();
    generator.close();
    return SniffResponse.buildResponse(writer.toString(), nodes);
}

From source file:com.google.openrtb.json.OpenRtbJsonWriter.java

@SuppressWarnings("deprecation")
protected void writeVideoFields(Video video, JsonGenerator gen) throws IOException {
    if (checkRequired(video.getMimesCount())) {
        writeStrings("mimes", video.getMimesList(), gen);
    }
    if (video.hasMinduration()) {
        gen.writeNumberField("minduration", video.getMinduration());
    }
    if (video.hasMaxduration()) {
        gen.writeNumberField("maxduration", video.getMaxduration());
    }
    if (video.hasProtocol()) {
        gen.writeNumberField("protocol", video.getProtocol().getNumber());
    }
    if (checkRequired(video.getProtocolsCount())) {
        writeEnums("protocols", video.getProtocolsList(), gen);
    }
    if (video.hasW()) {
        gen.writeNumberField("w", video.getW());
    }
    if (video.hasH()) {
        gen.writeNumberField("h", video.getH());
    }
    if (video.hasStartdelay()) {
        gen.writeNumberField("startdelay", video.getStartdelay());
    }
    if (video.hasLinearity()) {
        gen.writeNumberField("linearity", video.getLinearity().getNumber());
    }
    if (video.hasSequence()) {
        gen.writeNumberField("sequence", video.getSequence());
    }
    writeEnums("battr", video.getBattrList(), gen);
    if (video.hasMaxextended()) {
        gen.writeNumberField("maxextended", video.getMaxextended());
    }
    if (video.hasMinbitrate()) {
        gen.writeNumberField("minbitrate", video.getMinbitrate());
    }
    if (video.hasMaxbitrate()) {
        gen.writeNumberField("maxbitrate", video.getMaxbitrate());
    }
    if (video.hasBoxingallowed()) {
        writeIntBoolField("boxingallowed", video.getBoxingallowed(), gen);
    }
    writeEnums("playbackmethod", video.getPlaybackmethodList(), gen);
    writeEnums("delivery", video.getDeliveryList(), gen);
    if (video.hasPos()) {
        gen.writeNumberField("pos", video.getPos().getNumber());
    }
    if (video.getCompanionadCount() != 0) {
        // OpenRTB 2.2+
        gen.writeArrayFieldStart("companionad");
        for (Banner companionad : video.getCompanionadList()) {
            writeBanner(companionad, gen);
        }
        gen.writeEndArray();
    }
    if (video.hasCompanionad21()) {
        // OpenRTB 2.1-
        gen.writeFieldName("companionad");
        writeCompanionAd21(video.getCompanionad21(), gen);
    }
    writeEnums("api", video.getApiList(), gen);
    writeEnums("companiontype", video.getCompaniontypeList(), gen);
    if (video.hasSkip()) {
        writeIntBoolField("skip", video.getSkip(), gen);
    }
    if (video.hasSkipmin()) {
        gen.writeNumberField("skipmin", video.getSkipmin());
    }
    if (video.hasSkipafter()) {
        gen.writeNumberField("skipafter", video.getSkipafter());
    }
}

From source file:de.escalon.hypermedia.spring.de.escalon.hypermedia.spring.jackson.LinkListSerializer.java

private void writePossiblePropertyValues(JsonGenerator jgen, String currentVocab,
        ActionInputParameter actionInputParameter, Object[] possiblePropertyValues) throws IOException {
    // Enable the following to list possible values.
    // Problem: how to express individuals only for certain hydra:options
    // not all hydra:options should be taken as uris, sometimes they might be just literals
    // how to make that clear to the client?
    // maybe we must write them out for options
    //        if (possiblePropertyValues.length > 0) {
    //            jgen.writeArrayFieldStart("hydra:option");
    //            for (Object possibleValue : possiblePropertyValues) {
    //                // TODO: apply "hydra:option" : { "@type": "@vocab"} to context for enums
    //                writeScalarValue(jgen, possibleValue, actionInputParameter.getParameterType());
    //            }
    //            jgen.writeEndArray();
    //        }

    if (actionInputParameter.isArrayOrCollection()) {
        jgen.writeBooleanField(getPropertyOrClassNameInVocab(currentVocab, "multipleValues",
                JacksonHydraSerializer.HTTP_SCHEMA_ORG, "schema:"), true);
    }

    //  valueRequired (hard to say, using @Access on Event is for all update requests - or make
    //     specific request beans for different
    //     purposes rather than always passing an instance of e.g. Event?)
    //       -> update is a different use case than create - or maybe have an @Requires("eventStatus")
    //          annotation alongside requestBody to tell which attributes are required or writable, and use Requires over
    //          bean structure, where ctor with least length of args is required and setters are supported
    //          but optional? The bean structure does say what is writable for updates, but not what is required for creation. Right now setters are supportedProperties. For creation we would have to add constructor arguments as supportedProperties.
    //  (/) defaultValue (pre-filled value, e.g. list of selected items for option)
    //  valueName (for iri templates only)
    //  (/) readonlyValue (true for final public field or absence of setter, send fixed value like hidden field?) -> use hydra:readable, hydra:writable
    //  (/) multipleValues
    //  (/) valueMinLength
    //  (/) valueMaxLength
    //  (/) valuePattern
    //  minValue (DateTime support)
    //  maxValue (DateTime support)
    //  (/) stepValue
    final Map<String, Object> inputConstraints = actionInputParameter.getInputConstraints();

    if (actionInputParameter.hasCallValue()) {
        if (actionInputParameter.isArrayOrCollection()) {
            Object[] callValues = actionInputParameter.getCallValues();
            Class<?> componentType = callValues.getClass().getComponentType();
            // only write defaultValue for array of scalars
            if (DataType.isScalar(componentType)) {
                jgen.writeFieldName(getPropertyOrClassNameInVocab(currentVocab, "defaultValue",
                        JacksonHydraSerializer.HTTP_SCHEMA_ORG, "schema:"));
                jgen.writeStartArray();
                for (Object callValue : callValues) {
                    writeScalarValue(jgen, callValue, componentType);
                }
                jgen.writeEndArray();
            }
        } else {
            jgen.writeFieldName(getPropertyOrClassNameInVocab(currentVocab, "defaultValue",
                    JacksonHydraSerializer.HTTP_SCHEMA_ORG, "schema:"));

            writeScalarValue(jgen, actionInputParameter.getCallValueFormatted(),
                    actionInputParameter.getNestedParameterType());
        }
    }

    if (!inputConstraints.isEmpty()) {
        final List<String> keysToAppendValue = Arrays.asList(ActionInputParameter.MAX, ActionInputParameter.MIN,
                ActionInputParameter.STEP);
        for (String keyToAppendValue : keysToAppendValue) {
            final Object constraint = inputConstraints.get(keyToAppendValue);
            if (constraint != null) {
                jgen.writeFieldName(getPropertyOrClassNameInVocab(currentVocab, keyToAppendValue + "Value",
                        JacksonHydraSerializer.HTTP_SCHEMA_ORG, "schema:"));
                jgen.writeNumber(constraint.toString());
            }
        }

        final List<String> keysToPrependValue = Arrays.asList(ActionInputParameter.MAX_LENGTH,
                ActionInputParameter.MIN_LENGTH, ActionInputParameter.PATTERN);
        for (String keyToPrependValue : keysToPrependValue) {
            final Object constraint = inputConstraints.get(keyToPrependValue);
            if (constraint != null) {
                jgen.writeFieldName(getPropertyOrClassNameInVocab(currentVocab,
                        "value" + StringUtils.capitalize(keyToPrependValue),
                        JacksonHydraSerializer.HTTP_SCHEMA_ORG, "schema:"));
                if (ActionInputParameter.PATTERN.equals(keyToPrependValue)) {
                    jgen.writeString(constraint.toString());
                } else {
                    jgen.writeNumber(constraint.toString());
                }
            }
        }

    }

}

From source file:org.fluentd.jvmwatcher.parser.JsonSimpleLogParser.java

/**
 * @param generator
 * @param state
 * @throws IOException 
 * @throws JsonGenerationException 
 */
private void outSimpleLog(JsonGenerator generator, JvmWatchState state)
        throws JsonGenerationException, IOException {
    Collection<JvmStateLog> logArray = state.getStateLog();

    // convert to JSON stream of JvmStateLog.
    for (JvmStateLog elem : logArray) {
        generator.writeStartObject();
        // Common 
        generator.writeNumberField(LOG_DATETIME, elem.getLogDateTime());
        generator.writeStringField(HOST_NAME, this.getHostName());
        generator.writeStringField(PROC_STATE, elem.getProcState().name());
        generator.writeNumberField(JVM_ID, state.getJvmId());
        generator.writeStringField(SHORT_NAME, state.getShortName());
        generator.writeStringField(DISPRAY_NAME, state.getDisplayName());
        // runtime
        generator.writeNumberField(START_TIME, state.getJvmStartTime());
        generator.writeNumberField(LOG_RUN_UP_TIME, elem.getJvmUpTime());
        // cpu usage
        generator.writeNumberField(LOG_CPU_USAGE, elem.getCpuUsage());
        // Compilation
        generator.writeNumberField(LOG_COMPILE_TIME, elem.getCompileTime());
        // Class loading
        generator.writeNumberField(LOG_CLASS_LOAD_CNT, elem.getClassLoadedCount());
        generator.writeNumberField(LOG_CLASS_UNLOAD_CNT, elem.getClassUnloadedCount());
        generator.writeNumberField(LOG_CLASS_TOTAL_LOAD_CNT, elem.getClassTotalLoadedCount());
        // Thread
        generator.writeNumberField(LOG_THREAD_CNT, elem.getThreadCount());
        generator.writeNumberField(LOG_DAEMON_TH_CNT, elem.getDaemonThreadCount());
        generator.writeNumberField(LOG_PEAK_TH_CNT, elem.getPeakThreadCount());
        // Memory
        if (elem.getHeapSize() != null) {
            generator.writeNumberField(LOG_MEM_HEAP_INIT, elem.getHeapSize().getInit());
            generator.writeNumberField(LOG_MEM_HEAP_USED, elem.getHeapSize().getUsed());
            generator.writeNumberField(LOG_MEM_HEAP_COMMITED, elem.getHeapSize().getCommitted());
            generator.writeNumberField(LOG_MEM_HEAP_MAX, elem.getHeapSize().getMax());
        }
        if (elem.getNotheapSize() != null) {
            generator.writeNumberField(LOG_MEM_NOTHEAP_INIT, elem.getNotheapSize().getInit());
            generator.writeNumberField(LOG_MEM_NOTHEAP_USED, elem.getNotheapSize().getUsed());
            generator.writeNumberField(LOG_MEM_NOTHEAP_COMMITED, elem.getNotheapSize().getCommitted());
            generator.writeNumberField(LOG_MEM_NOTHEAP_MAX, elem.getNotheapSize().getMax());
        }
        generator.writeNumberField(LOG_MEM_PENDING_FIN_CNT, elem.getPendingFinalizationCount_());
        // OS Information
        generator.writeNumberField(LOG_OS_TOTAL_PHY_MEM_SIZE, elem.getTotalPhysicalMemorySize());
        generator.writeNumberField(LOG_OS_TOTAL_SWAP_MEM_SIZE, elem.getTotalSwapSpaceSize());
        generator.writeNumberField(LOG_OS_FREE_PHY_MEM_SIZE, elem.getFreePhysicalMemorySize());
        generator.writeNumberField(LOG_OS_FREE_SWAP_MEM_SIZE, elem.getFreeSwapSpaceSize());
        generator.writeNumberField(LOG_OS_COMMIT_VMEM_SIZE, elem.getCommittedVirtualMemorySize());

        Collection<GarbageCollectorState> gcColl = elem.getGcStateCollection();
        if (null != gcColl) {
            // GC information (array output)
            generator.writeFieldName(LOG_KEY_GC_COLLECT);
            generator.writeStartArray();
            for (GarbageCollectorState gcElem : gcColl) {
                generator.writeStartObject();
                generator.writeStringField(LOG_GC_MEM_MGR_NAME, gcElem.getMemoryManagerName());
                generator.writeNumberField(LOG_GC_COLLECTION_CNT, gcElem.getCollectionCount());
                generator.writeNumberField(LOG_GC_COLLECTION_TIME, gcElem.getCollectionTime());
                generator.writeEndObject();
            }
            generator.writeEndArray();
        }

        generator.writeEndObject();
        generator.writeRaw("\n");
    }
}

From source file:de.escalon.hypermedia.spring.hydra.LinkListSerializer.java

private void writePossiblePropertyValues(JsonGenerator jgen, String currentVocab,
        ActionInputParameter actionInputParameter, @SuppressWarnings("unused") Object[] possiblePropertyValues)
        throws IOException {
    // Enable the following to list possible values.
    // Problem: how to express individuals only for certain hydra:options
    // not all hydra:options should be taken as uris, sometimes they might be just literals
    // how to make that clear to the client?
    // maybe we must write them out for options
    //        if (possiblePropertyValues.length > 0) {
    //            jgen.writeArrayFieldStart("hydra:option");
    //            for (Object possibleValue : possiblePropertyValues) {
    //                // TODO: apply "hydra:option" : { "@type": "@vocab"} to context for enums
    //                writeScalarValue(jgen, possibleValue, rootParameter.getParameterType());
    //            }
    //            jgen.writeEndArray();
    //        }

    if (actionInputParameter.isArrayOrCollection()) {
        jgen.writeBooleanField(getPropertyOrClassNameInVocab(currentVocab, "multipleValues",
                LdContextFactory.HTTP_SCHEMA_ORG, "schema:"), true);
    }

    //  valueRequired (hard to say, using @Access on Event is for all update requests - or make
    //     specific request beans for different
    //     purposes rather than always passing an instance of e.g. Event?)
    //       -> update is a different use case than create - or maybe have an @Requires("eventStatus")
    //          annotation alongside requestBody to tell which attributes are required or writable, and use
    // Requires over
    //          bean structure, where ctor with least length of args is required and setters are supported
    //          but optional? The bean structure does say what is writable for updates, but not what is required
    // for creation. Right now setters are supportedProperties. For creation we would have to add constructor
    // arguments as supportedProperties.
    //  (/) defaultValue (pre-filled value, e.g. list of selected items for option)
    //  valueName (for iri templates only)
    //  (/) readonlyValue (true for final public field or absence of setter, send fixed value like hidden field?)
    // -> use hydra:readable, hydra:writable
    //  (/) multipleValues
    //  (/) valueMinLength
    //  (/) valueMaxLength
    //  (/) valuePattern
    //  minValue (DateTime support)
    //  maxValue (DateTime support)
    //  (/) stepValue
    final Map<String, Object> inputConstraints = actionInputParameter.getInputConstraints();

    if (actionInputParameter.hasValue()) {
        if (actionInputParameter.isArrayOrCollection()) {
            Object[] callValues = actionInputParameter.getValues();
            Class<?> componentType = callValues.getClass().getComponentType();
            // only write defaultValue for array of scalars
            if (DataType.isSingleValueType(componentType)) {
                jgen.writeFieldName(getPropertyOrClassNameInVocab(currentVocab, "defaultValue",
                        LdContextFactory.HTTP_SCHEMA_ORG, "schema:"));
                jgen.writeStartArray();
                for (Object callValue : callValues) {
                    writeScalarValue(jgen, callValue, componentType);
                }
                jgen.writeEndArray();
            }
        } else {
            jgen.writeFieldName(getPropertyOrClassNameInVocab(currentVocab, "defaultValue",
                    LdContextFactory.HTTP_SCHEMA_ORG, "schema:"));

            writeScalarValue(jgen, actionInputParameter.getValue(), actionInputParameter.getParameterType());
        }
    }

    if (!inputConstraints.isEmpty()) {
        final List<String> keysToAppendValue = Arrays.asList(Input.MAX, Input.MIN, Input.STEP);
        for (String keyToAppendValue : keysToAppendValue) {
            final Object constraint = inputConstraints.get(keyToAppendValue);
            if (constraint != null) {
                jgen.writeFieldName(getPropertyOrClassNameInVocab(currentVocab, keyToAppendValue + "Value",
                        LdContextFactory.HTTP_SCHEMA_ORG, "schema:"));
                jgen.writeNumber(constraint.toString());
            }
        }

        final List<String> keysToPrependValue = Arrays.asList(Input.MAX_LENGTH, Input.MIN_LENGTH,
                Input.PATTERN);
        for (String keyToPrependValue : keysToPrependValue) {
            final Object constraint = inputConstraints.get(keyToPrependValue);
            if (constraint != null) {
                jgen.writeFieldName(getPropertyOrClassNameInVocab(currentVocab,
                        "value" + StringUtils.capitalize(keyToPrependValue), LdContextFactory.HTTP_SCHEMA_ORG,
                        "schema:"));
                if (Input.PATTERN.equals(keyToPrependValue)) {
                    jgen.writeString(constraint.toString());
                } else {
                    jgen.writeNumber(constraint.toString());
                }
            }
        }

    }

}

From source file:org.apache.nifi.processors.elasticsearch.PutElasticsearchHttpRecord.java

@SuppressWarnings("unchecked")
private void writeValue(final JsonGenerator generator, final Object value, final String fieldName,
        final DataType dataType) throws IOException {
    if (value == null) {
        generator.writeNull();
        return;
    }

    final DataType chosenDataType = dataType.getFieldType() == RecordFieldType.CHOICE
            ? DataTypeUtils.chooseDataType(value, (ChoiceDataType) dataType)
            : dataType;
    final Object coercedValue = DataTypeUtils.convertType(value, chosenDataType, fieldName);
    if (coercedValue == null) {
        generator.writeNull();
        return;
    }

    switch (chosenDataType.getFieldType()) {
    case DATE: {
        final String stringValue = DataTypeUtils.toString(coercedValue,
                () -> DataTypeUtils.getDateFormat(RecordFieldType.DATE.getDefaultFormat()));
        if (DataTypeUtils.isLongTypeCompatible(stringValue)) {
            generator.writeNumber(DataTypeUtils.toLong(coercedValue, fieldName));
        } else {
            generator.writeString(stringValue);
        }
        break;
    }
    case TIME: {
        final String stringValue = DataTypeUtils.toString(coercedValue,
                () -> DataTypeUtils.getDateFormat(RecordFieldType.TIME.getDefaultFormat()));
        if (DataTypeUtils.isLongTypeCompatible(stringValue)) {
            generator.writeNumber(DataTypeUtils.toLong(coercedValue, fieldName));
        } else {
            generator.writeString(stringValue);
        }
        break;
    }
    case TIMESTAMP: {
        final String stringValue = DataTypeUtils.toString(coercedValue,
                () -> DataTypeUtils.getDateFormat(RecordFieldType.TIMESTAMP.getDefaultFormat()));
        if (DataTypeUtils.isLongTypeCompatible(stringValue)) {
            generator.writeNumber(DataTypeUtils.toLong(coercedValue, fieldName));
        } else {
            generator.writeString(stringValue);
        }
        break;
    }
    case DOUBLE:
        generator.writeNumber(DataTypeUtils.toDouble(coercedValue, fieldName));
        break;
    case FLOAT:
        generator.writeNumber(DataTypeUtils.toFloat(coercedValue, fieldName));
        break;
    case LONG:
        generator.writeNumber(DataTypeUtils.toLong(coercedValue, fieldName));
        break;
    case INT:
    case BYTE:
    case SHORT:
        generator.writeNumber(DataTypeUtils.toInteger(coercedValue, fieldName));
        break;
    case CHAR:
    case STRING:
        generator.writeString(coercedValue.toString());
        break;
    case BIGINT:
        if (coercedValue instanceof Long) {
            generator.writeNumber((Long) coercedValue);
        } else {
            generator.writeNumber((BigInteger) coercedValue);
        }
        break;
    case BOOLEAN:
        final String stringValue = coercedValue.toString();
        if ("true".equalsIgnoreCase(stringValue)) {
            generator.writeBoolean(true);
        } else if ("false".equalsIgnoreCase(stringValue)) {
            generator.writeBoolean(false);
        } else {
            generator.writeString(stringValue);
        }
        break;
    case RECORD: {
        final Record record = (Record) coercedValue;
        final RecordDataType recordDataType = (RecordDataType) chosenDataType;
        final RecordSchema childSchema = recordDataType.getChildSchema();
        writeRecord(record, childSchema, generator);
        break;
    }
    case MAP: {
        final MapDataType mapDataType = (MapDataType) chosenDataType;
        final DataType valueDataType = mapDataType.getValueType();
        final Map<String, ?> map = (Map<String, ?>) coercedValue;
        generator.writeStartObject();
        for (final Map.Entry<String, ?> entry : map.entrySet()) {
            final String mapKey = entry.getKey();
            final Object mapValue = entry.getValue();
            generator.writeFieldName(mapKey);
            writeValue(generator, mapValue, fieldName + "." + mapKey, valueDataType);
        }
        generator.writeEndObject();
        break;
    }
    case ARRAY:
    default:
        if (coercedValue instanceof Object[]) {
            final Object[] values = (Object[]) coercedValue;
            final ArrayDataType arrayDataType = (ArrayDataType) dataType;
            final DataType elementType = arrayDataType.getElementType();
            writeArray(values, fieldName, generator, elementType);
        } else {
            generator.writeString(coercedValue.toString());
        }
        break;
    }
}