Example usage for com.fasterxml.jackson.core JsonGenerator close

Introduction

This page lists example usages of com.fasterxml.jackson.core JsonGenerator.close().

Prototype

@Override
public abstract void close() throws IOException;

Document

Method called to close this generator, so that no more content can be written.
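
As a minimal sketch of this behavior (illustrative only; the "status" field and variable names are not taken from the examples below), a generator is closed once the JSON document is complete. close() flushes any buffered content, and writing after it fails; since JsonGenerator implements Closeable, try-with-resources works as well:

StringWriter writer = new StringWriter();
JsonGenerator gen = new JsonFactory().createGenerator(writer);
try {
    gen.writeStartObject();
    gen.writeStringField("status", "ok"); // illustrative field
    gen.writeEndObject();
} finally {
    gen.close(); // flushes remaining content; no more content can be written afterwards
}
String json = writer.toString(); // {"status":"ok"}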

Usage

From source file: org.apache.olingo.server.core.serializer.json.ODataJsonSerializer.java

@Override
public SerializerResult entityCollection(final ServiceMetadata metadata, final EdmEntityType entityType,
        final AbstractEntityCollection entitySet, final EntityCollectionSerializerOptions options)
        throws SerializerException {
    OutputStream outputStream = null;
    SerializerException cachedException = null;
    boolean pagination = false;
    try {
        CircleStreamBuffer buffer = new CircleStreamBuffer();
        outputStream = buffer.getOutputStream();
        JsonGenerator json = new JsonFactory().createGenerator(outputStream);
        json.writeStartObject();

        final ContextURL contextURL = checkContextURL(options == null ? null : options.getContextURL());
        String name = contextURL == null ? null : contextURL.getEntitySetOrSingletonOrType();
        writeContextURL(contextURL, json);

        writeMetadataETag(metadata, json);

        if (options != null && options.getCount() != null && options.getCount().getValue()) {
            writeInlineCount("", entitySet.getCount(), json);
        }
        writeOperations(entitySet.getOperations(), json);
        json.writeFieldName(Constants.VALUE);
        if (options == null) {
            writeEntitySet(metadata, entityType, entitySet, null, null, null, false, null, name, json);
        } else {
            writeEntitySet(metadata, entityType, entitySet, options.getExpand(), null, options.getSelect(),
                    options.getWriteOnlyReferences(), null, name, json);
        }
        writeNextLink(entitySet, json, pagination);
        writeDeltaLink(entitySet, json, pagination);

        json.close();
        outputStream.close();
        return SerializerResultImpl.with().content(buffer.getInputStream()).build();
    } catch (final IOException e) {
        cachedException = new SerializerException(IO_EXCEPTION_TEXT, e,
                SerializerException.MessageKeys.IO_EXCEPTION);
        throw cachedException;
    } catch (DecoderException e) {
        cachedException = new SerializerException(IO_EXCEPTION_TEXT, e,
                SerializerException.MessageKeys.IO_EXCEPTION);
        throw cachedException;
    } finally {
        closeCircleStreamBufferOutput(outputStream, cachedException);
    }
}

From source file: org.neo4j.ontology.server.unmanaged.AnnotationResource.java

@GET
@Produces(MediaType.APPLICATION_JSON)
@Path("/{userName}")
public Response getAnnotationSets(final @PathParam("userName") String userName,
        final @DefaultValue("0") @QueryParam("objectification") int objectification) {
    StreamingOutput stream = new StreamingOutput() {
        @Override
        public void write(OutputStream os) throws IOException, WebApplicationException {
            Map<Long, List<Long>> associatedDataSets = new HashMap<>();
            Label annotationLabel = DynamicLabel.label("AnnotationSets" + capitalize(userName));

            JsonGenerator jg = objectMapper.getFactory().createGenerator(os, JsonEncoding.UTF8);
            jg.writeStartObject();
            jg.writeFieldName("nodes");

            if (objectification > 0) {
                jg.writeStartObject();
            } else {
                jg.writeStartArray();
            }

            try (Transaction tx = graphDb.beginTx();
                    ResourceIterator<Node> users = graphDb.findNodes(USER, "name", userName)) {
                if (users.hasNext()) {
                    getDirectAnnotationTerms(getAccessibleDataSets(users.next()), associatedDataSets);
                }
                tx.success();
            }

            try (Transaction tx = graphDb.beginTx();
                    ResourceIterator<Node> terms = graphDb.findNodes(annotationLabel)) {
                while (terms.hasNext()) {
                    Node term = terms.next();
                    if (objectification > 0) {
                        jg.writeFieldName(term.getProperty("uri").toString());
                    }
                    if (objectification > 1) {
                        if (associatedDataSets.containsKey(term.getId())) {
                            writeJsonNodeObjectifiedObject(jg, term, annotationLabel,
                                    associatedDataSets.get(term.getId()));
                        } else {
                            writeJsonNodeObjectifiedObject(jg, term, annotationLabel);
                        }
                    } else {
                        if (associatedDataSets.containsKey(term.getId())) {
                            writeJsonNodeObject(jg, term, annotationLabel,
                                    associatedDataSets.get(term.getId()));
                        } else {
                            writeJsonNodeObject(jg, term, annotationLabel);
                        }
                    }
                }
                tx.success();
            }

            if (objectification > 0) {
                jg.writeEndObject();
            } else {
                jg.writeEndArray();
            }
            jg.writeEndObject();
            jg.flush();
            jg.close();
        }
    };

    return Response.ok().entity(stream).type(MediaType.APPLICATION_JSON).build();
}

From source file: org.apache.olingo.server.core.serializer.json.ODataJsonSerializer.java

@Override
public SerializerResult complexCollection(final ServiceMetadata metadata, final EdmComplexType type,
        final Property property, final ComplexSerializerOptions options) throws SerializerException {
    OutputStream outputStream = null;
    SerializerException cachedException = null;
    try {
        final ContextURL contextURL = checkContextURL(options == null ? null : options.getContextURL());
        CircleStreamBuffer buffer = new CircleStreamBuffer();
        outputStream = buffer.getOutputStream();
        JsonGenerator json = new JsonFactory().createGenerator(outputStream);
        json.writeStartObject();
        writeContextURL(contextURL, json);
        writeMetadataETag(metadata, json);
        if (isODataMetadataFull) {
            json.writeStringField(constants.getType(),
                    "#Collection(" + type.getFullQualifiedName().getFullQualifiedNameAsString() + ")");
        }
        writeOperations(property.getOperations(), json);
        json.writeFieldName(Constants.VALUE);
        Set<List<String>> selectedPaths = null;
        if (null != options && null != options.getSelect()) {
            final boolean all = ExpandSelectHelper.isAll(options.getSelect());
            selectedPaths = all || property.isPrimitive() ? null
                    : ExpandSelectHelper.getSelectedPaths(options.getSelect().getSelectItems());
        }
        Set<List<String>> expandPaths = null;
        if (null != options && null != options.getExpand()) {
            expandPaths = ExpandSelectHelper.getExpandedItemsPath(options.getExpand());
        }
        writeComplexCollection(metadata, type, property, selectedPaths, json, expandPaths, null,
                options == null ? null : options.getExpand());
        json.writeEndObject();

        json.close();
        outputStream.close();
        return SerializerResultImpl.with().content(buffer.getInputStream()).build();
    } catch (final IOException e) {
        cachedException = new SerializerException(IO_EXCEPTION_TEXT, e,
                SerializerException.MessageKeys.IO_EXCEPTION);
        throw cachedException;
    } finally {
        closeCircleStreamBufferOutput(outputStream, cachedException);
    }
}

From source file: com.baidubce.services.bos.BosClient.java

/**
 * Sets the Acl for the specified Bos bucket.
 *
 * @param request The request object containing the bucket to modify and the ACL to set.
 */
public void setBucketAcl(SetBucketAclRequest request) {
    checkNotNull(request, "request should not be null.");

    InternalRequest internalRequest = this.createRequest(request, HttpMethodName.PUT);
    internalRequest.addParameter("acl", null);

    if (request.getCannedAcl() != null) {
        internalRequest.addHeader(Headers.BCE_ACL, request.getCannedAcl().toString());
        this.setZeroContentLength(internalRequest);
    } else if (request.getAccessControlList() != null) {
        byte[] json = null;
        List<Grant> grants = request.getAccessControlList();
        StringWriter writer = new StringWriter();
        try {
            JsonGenerator jsonGenerator = JsonUtils.jsonGeneratorOf(writer);
            jsonGenerator.writeStartObject();
            jsonGenerator.writeArrayFieldStart("accessControlList");
            for (Grant grant : grants) {
                jsonGenerator.writeStartObject();
                jsonGenerator.writeArrayFieldStart("grantee");
                for (Grantee grantee : grant.getGrantee()) {
                    jsonGenerator.writeStartObject();
                    jsonGenerator.writeStringField("id", grantee.getId());
                    jsonGenerator.writeEndObject();
                }
                jsonGenerator.writeEndArray();
                jsonGenerator.writeArrayFieldStart("permission");
                for (Permission permission : grant.getPermission()) {
                    jsonGenerator.writeString(permission.toString());
                }
                jsonGenerator.writeEndArray();
                jsonGenerator.writeEndObject();
            }
            jsonGenerator.writeEndArray();
            jsonGenerator.writeEndObject();
            jsonGenerator.close();
        } catch (IOException e) {
            throw new BceClientException("Fail to generate json", e);
        }
        try {
            json = writer.toString().getBytes(DEFAULT_ENCODING);
        } catch (UnsupportedEncodingException e) {
            throw new BceClientException("Fail to get UTF-8 bytes", e);
        }
        internalRequest.addHeader(Headers.CONTENT_LENGTH, String.valueOf(json.length));
        internalRequest.addHeader(Headers.CONTENT_TYPE, "application/json");
        internalRequest.setContent(RestartableInputStream.wrap(json));
    } else {
        checkNotNull(null, "request.acl should not be null.");
    }

    this.invokeHttpClient(internalRequest, BosResponse.class);
}

From source file: org.jmxtrans.embedded.output.LibratoWriter.java

public void serialize(@Nonnull Iterable<QueryResult> counters, @Nonnull Iterable<QueryResult> gauges,
        @Nonnull OutputStream out) throws IOException {
    JsonGenerator g = jsonFactory.createGenerator(out, JsonEncoding.UTF8);
    g.writeStartObject();
    g.writeArrayFieldStart("counters");

    for (QueryResult counter : counters) {
        g.writeStartObject();
        g.writeStringField("name", counter.getName());
        if (source != null && !source.isEmpty()) {
            g.writeStringField("source", source);
        }
        g.writeNumberField("measure_time", counter.getEpoch(TimeUnit.SECONDS));
        if (counter.getValue() instanceof Integer) {
            g.writeNumberField("value", (Integer) counter.getValue());
        } else if (counter.getValue() instanceof Long) {
            g.writeNumberField("value", (Long) counter.getValue());
        } else if (counter.getValue() instanceof Float) {
            g.writeNumberField("value", (Float) counter.getValue());
        } else if (counter.getValue() instanceof Double) {
            g.writeNumberField("value", (Double) counter.getValue());
        } else if (counter.getValue() instanceof AtomicInteger) {
            g.writeNumberField("value", ((AtomicInteger) counter.getValue()).get());
        } else if (counter.getValue() instanceof AtomicLong) {
            g.writeNumberField("value", ((AtomicLong) counter.getValue()).get());
        }
        g.writeEndObject();
    }
    g.writeEndArray();

    g.writeArrayFieldStart("gauges");

    for (QueryResult gauge : gauges) {
        g.writeStartObject();
        g.writeStringField("name", gauge.getName());
        if (source != null && !source.isEmpty()) {
            g.writeStringField("source", source);
        }
        g.writeNumberField("measure_time", gauge.getEpoch(TimeUnit.SECONDS));
        if (gauge.getValue() instanceof Integer) {
            g.writeNumberField("value", (Integer) gauge.getValue());
        } else if (gauge.getValue() instanceof Long) {
            g.writeNumberField("value", (Long) gauge.getValue());
        } else if (gauge.getValue() instanceof Float) {
            g.writeNumberField("value", (Float) gauge.getValue());
        } else if (gauge.getValue() instanceof Double) {
            g.writeNumberField("value", (Double) gauge.getValue());
        } else if (gauge.getValue() instanceof AtomicInteger) {
            g.writeNumberField("value", ((AtomicInteger) gauge.getValue()).get());
        } else if (gauge.getValue() instanceof AtomicLong) {
            g.writeNumberField("value", ((AtomicLong) gauge.getValue()).get());
        }
        g.writeEndObject();
    }
    g.writeEndArray();
    g.writeEndObject();
    g.flush();
    g.close();
}

From source file: org.apache.flink.runtime.jobgraph.jsonplan.JsonPlanGenerator.java

public static String generatePlan(JobGraph jg) {
    try {
        final StringWriter writer = new StringWriter(1024);

        final JsonFactory factory = new JsonFactory();
        final JsonGenerator gen = factory.createGenerator(writer);

        // start of everything
        gen.writeStartObject();
        gen.writeStringField("jid", jg.getJobID().toString());
        gen.writeStringField("name", jg.getName());
        gen.writeArrayFieldStart("nodes");

        // info per vertex
        for (JobVertex vertex : jg.getVertices()) {

            String operator = vertex.getOperatorName() != null ? vertex.getOperatorName() : NOT_SET;

            String operatorDescr = vertex.getOperatorDescription() != null ? vertex.getOperatorDescription()
                    : NOT_SET;

            String optimizerProps = vertex.getResultOptimizerProperties() != null
                    ? vertex.getResultOptimizerProperties()
                    : EMPTY;

            String description = vertex.getOperatorPrettyName() != null ? vertex.getOperatorPrettyName()
                    : vertex.getName();

            // make sure the encoding is HTML pretty
            description = StringEscapeUtils.escapeHtml4(description);
            description = description.replace("\n", "<br/>");
            description = description.replace("\\", "&#92;");

            operatorDescr = StringEscapeUtils.escapeHtml4(operatorDescr);
            operatorDescr = operatorDescr.replace("\n", "<br/>");

            gen.writeStartObject();

            // write the core properties
            gen.writeStringField("id", vertex.getID().toString());
            gen.writeNumberField("parallelism", vertex.getParallelism());
            gen.writeStringField("operator", operator);
            gen.writeStringField("operator_strategy", operatorDescr);
            gen.writeStringField("description", description);

            if (!vertex.isInputVertex()) {
                // write the input edge properties
                gen.writeArrayFieldStart("inputs");

                List<JobEdge> inputs = vertex.getInputs();
                for (int inputNum = 0; inputNum < inputs.size(); inputNum++) {
                    JobEdge edge = inputs.get(inputNum);
                    if (edge.getSource() == null) {
                        continue;
                    }

                    JobVertex predecessor = edge.getSource().getProducer();

                    String shipStrategy = edge.getShipStrategyName();
                    String preProcessingOperation = edge.getPreProcessingOperationName();
                    String operatorLevelCaching = edge.getOperatorLevelCachingDescription();

                    gen.writeStartObject();
                    gen.writeNumberField("num", inputNum);
                    gen.writeStringField("id", predecessor.getID().toString());

                    if (shipStrategy != null) {
                        gen.writeStringField("ship_strategy", shipStrategy);
                    }
                    if (preProcessingOperation != null) {
                        gen.writeStringField("local_strategy", preProcessingOperation);
                    }
                    if (operatorLevelCaching != null) {
                        gen.writeStringField("caching", operatorLevelCaching);
                    }

                    gen.writeStringField("exchange", edge.getSource().getResultType().name().toLowerCase());

                    gen.writeEndObject();
                }

                gen.writeEndArray();
            }

            // write the optimizer properties
            gen.writeFieldName("optimizer_properties");
            gen.writeRawValue(optimizerProps);

            gen.writeEndObject();
        }

        // end of everything
        gen.writeEndArray();
        gen.writeEndObject();

        gen.close();

        return writer.toString();
    } catch (Exception e) {
        throw new RuntimeException("Failed to generate plan", e);
    }
}

From source file: org.elasticsearch.client.sniff.HostsSnifferTests.java

private static SniffResponse buildSniffResponse(HostsSniffer.Scheme scheme) throws IOException {
    int numNodes = RandomInts.randomIntBetween(getRandom(), 1, 5);
    List<HttpHost> hosts = new ArrayList<>(numNodes);
    JsonFactory jsonFactory = new JsonFactory();
    StringWriter writer = new StringWriter();
    JsonGenerator generator = jsonFactory.createGenerator(writer);
    generator.writeStartObject();
    if (getRandom().nextBoolean()) {
        generator.writeStringField("cluster_name", "elasticsearch");
    }
    if (getRandom().nextBoolean()) {
        generator.writeObjectFieldStart("bogus_object");
        generator.writeEndObject();
    }
    generator.writeObjectFieldStart("nodes");
    for (int i = 0; i < numNodes; i++) {
        String nodeId = RandomStrings.randomAsciiOfLengthBetween(getRandom(), 5, 10);
        generator.writeObjectFieldStart(nodeId);
        if (getRandom().nextBoolean()) {
            generator.writeObjectFieldStart("bogus_object");
            generator.writeEndObject();
        }
        if (getRandom().nextBoolean()) {
            generator.writeArrayFieldStart("bogus_array");
            generator.writeStartObject();
            generator.writeEndObject();
            generator.writeEndArray();
        }
        boolean isHttpEnabled = rarely() == false;
        if (isHttpEnabled) {
            String host = "host" + i;
            int port = RandomInts.randomIntBetween(getRandom(), 9200, 9299);
            HttpHost httpHost = new HttpHost(host, port, scheme.toString());
            hosts.add(httpHost);
            generator.writeObjectFieldStart("http");
            if (getRandom().nextBoolean()) {
                generator.writeArrayFieldStart("bound_address");
                generator.writeString("[fe80::1]:" + port);
                generator.writeString("[::1]:" + port);
                generator.writeString("127.0.0.1:" + port);
                generator.writeEndArray();
            }
            if (getRandom().nextBoolean()) {
                generator.writeObjectFieldStart("bogus_object");
                generator.writeEndObject();
            }
            generator.writeStringField("publish_address", httpHost.toHostString());
            if (getRandom().nextBoolean()) {
                generator.writeNumberField("max_content_length_in_bytes", 104857600);
            }
            generator.writeEndObject();
        }
        if (getRandom().nextBoolean()) {
            String[] roles = { "master", "data", "ingest" };
            int numRoles = RandomInts.randomIntBetween(getRandom(), 0, 3);
            Set<String> nodeRoles = new HashSet<>(numRoles);
            for (int j = 0; j < numRoles; j++) {
                String role;
                do {
                    role = RandomPicks.randomFrom(getRandom(), roles);
                } while (nodeRoles.add(role) == false);
            }
            generator.writeArrayFieldStart("roles");
            for (String nodeRole : nodeRoles) {
                generator.writeString(nodeRole);
            }
            generator.writeEndArray();
        }
        int numAttributes = RandomInts.randomIntBetween(getRandom(), 0, 3);
        Map<String, String> attributes = new HashMap<>(numAttributes);
        for (int j = 0; j < numAttributes; j++) {
            attributes.put("attr" + j, "value" + j);
        }
        if (numAttributes > 0) {
            generator.writeObjectFieldStart("attributes");
        }
        for (Map.Entry<String, String> entry : attributes.entrySet()) {
            generator.writeStringField(entry.getKey(), entry.getValue());
        }
        if (numAttributes > 0) {
            generator.writeEndObject();
        }
        generator.writeEndObject();
    }
    generator.writeEndObject();
    generator.writeEndObject();
    generator.close();
    return SniffResponse.buildResponse(writer.toString(), hosts);
}

From source file: org.elasticsearch.client.sniff.ElasticsearchHostsSnifferTests.java

private static SniffResponse buildSniffResponse(ElasticsearchHostsSniffer.Scheme scheme) throws IOException {
    int numNodes = RandomNumbers.randomIntBetween(getRandom(), 1, 5);
    List<HttpHost> hosts = new ArrayList<>(numNodes);
    JsonFactory jsonFactory = new JsonFactory();
    StringWriter writer = new StringWriter();
    JsonGenerator generator = jsonFactory.createGenerator(writer);
    generator.writeStartObject();
    if (getRandom().nextBoolean()) {
        generator.writeStringField("cluster_name", "elasticsearch");
    }
    if (getRandom().nextBoolean()) {
        generator.writeObjectFieldStart("bogus_object");
        generator.writeEndObject();
    }
    generator.writeObjectFieldStart("nodes");
    for (int i = 0; i < numNodes; i++) {
        String nodeId = RandomStrings.randomAsciiOfLengthBetween(getRandom(), 5, 10);
        generator.writeObjectFieldStart(nodeId);
        if (getRandom().nextBoolean()) {
            generator.writeObjectFieldStart("bogus_object");
            generator.writeEndObject();
        }
        if (getRandom().nextBoolean()) {
            generator.writeArrayFieldStart("bogus_array");
            generator.writeStartObject();
            generator.writeEndObject();
            generator.writeEndArray();
        }
        boolean isHttpEnabled = rarely() == false;
        if (isHttpEnabled) {
            String host = "host" + i;
            int port = RandomNumbers.randomIntBetween(getRandom(), 9200, 9299);
            HttpHost httpHost = new HttpHost(host, port, scheme.toString());
            hosts.add(httpHost);
            generator.writeObjectFieldStart("http");
            if (getRandom().nextBoolean()) {
                generator.writeArrayFieldStart("bound_address");
                generator.writeString("[fe80::1]:" + port);
                generator.writeString("[::1]:" + port);
                generator.writeString("127.0.0.1:" + port);
                generator.writeEndArray();
            }
            if (getRandom().nextBoolean()) {
                generator.writeObjectFieldStart("bogus_object");
                generator.writeEndObject();
            }
            generator.writeStringField("publish_address", httpHost.toHostString());
            if (getRandom().nextBoolean()) {
                generator.writeNumberField("max_content_length_in_bytes", 104857600);
            }
            generator.writeEndObject();
        }
        if (getRandom().nextBoolean()) {
            String[] roles = { "master", "data", "ingest" };
            int numRoles = RandomNumbers.randomIntBetween(getRandom(), 0, 3);
            Set<String> nodeRoles = new HashSet<>(numRoles);
            for (int j = 0; j < numRoles; j++) {
                String role;
                do {
                    role = RandomPicks.randomFrom(getRandom(), roles);
                } while (nodeRoles.add(role) == false);
            }
            generator.writeArrayFieldStart("roles");
            for (String nodeRole : nodeRoles) {
                generator.writeString(nodeRole);
            }
            generator.writeEndArray();
        }
        int numAttributes = RandomNumbers.randomIntBetween(getRandom(), 0, 3);
        Map<String, String> attributes = new HashMap<>(numAttributes);
        for (int j = 0; j < numAttributes; j++) {
            attributes.put("attr" + j, "value" + j);
        }
        if (numAttributes > 0) {
            generator.writeObjectFieldStart("attributes");
        }
        for (Map.Entry<String, String> entry : attributes.entrySet()) {
            generator.writeStringField(entry.getKey(), entry.getValue());
        }
        if (numAttributes > 0) {
            generator.writeEndObject();
        }
        generator.writeEndObject();
    }
    generator.writeEndObject();
    generator.writeEndObject();
    generator.close();
    return SniffResponse.buildResponse(writer.toString(), hosts);
}

From source file: com.tage.calcite.adapter.druid.DruidQuery.java

private QuerySpec getQuery(RelDataType rowType, RexNode filter, List<RexNode> projects,
        ImmutableBitSet groupSet, List<AggregateCall> aggCalls, List<String> aggNames) {
    QueryType queryType = QueryType.SELECT;
    final Translator translator = new Translator(druidTable, rowType);
    List<String> fieldNames = rowType.getFieldNames();

    Json jsonFilter = null;
    if (filter != null) {
        jsonFilter = translator.translateFilter(filter);
        translator.metrics.clear();
        translator.dimensions.clear();
    }

    if (projects != null) {
        final ImmutableList.Builder<String> builder = ImmutableList.builder();
        for (RexNode project : projects) {
            builder.add(translator.translate(project));
        }
        fieldNames = builder.build();
    }

    final List<String> dimensions = new ArrayList<>();
    final List<JsonAggregation> aggregations = new ArrayList<>();

    if (groupSet != null) {
        assert aggCalls != null;
        assert aggNames != null;
        assert aggCalls.size() == aggNames.size();
        queryType = QueryType.GROUP_BY;

        final ImmutableList.Builder<String> builder = ImmutableList.builder();
        for (int groupKey : groupSet) {
            final String s = fieldNames.get(groupKey);
            dimensions.add(s);
            builder.add(s);
        }
        for (Pair<AggregateCall, String> agg : Pair.zip(aggCalls, aggNames)) {
            final JsonAggregation jsonAggregation = getJsonAggregation(fieldNames, agg.right, agg.left);
            aggregations.add(jsonAggregation);
            builder.add(jsonAggregation.name);
        }
        fieldNames = builder.build();
    } else {
        assert aggCalls == null;
        assert aggNames == null;
    }

    final StringWriter sw = new StringWriter();
    final JsonFactory factory = new JsonFactory();
    try {
        final JsonGenerator generator = factory.createGenerator(sw);

        switch (queryType) {
        case GROUP_BY:
            generator.writeStartObject();

            if (aggregations.isEmpty()) {
                // Druid requires at least one aggregation, otherwise gives:
                //   Must have at least one AggregatorFactory
                aggregations.add(new JsonAggregation("longSum", "unit_sales", "unit_sales"));
            }

            generator.writeStringField("queryType", "groupBy");
            generator.writeStringField("dataSource", druidTable.dataSource);
            generator.writeStringField("granularity", "all");
            writeField(generator, "dimensions", dimensions);
            writeFieldIf(generator, "limitSpec", null);
            writeFieldIf(generator, "filter", jsonFilter);
            writeField(generator, "aggregations", aggregations);
            writeFieldIf(generator, "postAggregations", null);
            writeField(generator, "intervals", druidTable.intervals);
            writeFieldIf(generator, "having", null);

            generator.writeEndObject();
            break;

        case SELECT:
            generator.writeStartObject();

            generator.writeStringField("queryType", "select");
            generator.writeStringField("dataSource", druidTable.dataSource);
            generator.writeStringField("descending", "false");
            writeField(generator, "intervals", druidTable.intervals);
            writeFieldIf(generator, "filter", jsonFilter);
            writeField(generator, "dimensions", translator.dimensions);
            writeField(generator, "metrics", translator.metrics);
            generator.writeStringField("granularity", "all");

            generator.writeFieldName("pagingSpec");
            generator.writeStartObject();
            final int fetch = CalciteConnectionProperty.DRUID_FETCH.wrap(new Properties()).getInt();
            generator.writeNumberField("threshold", fetch);
            generator.writeEndObject();

            generator.writeEndObject();
            break;

        default:
            throw new AssertionError("unknown query type " + queryType);
        }

        generator.close();
    } catch (IOException e) {
        e.printStackTrace();
    }

    return new QuerySpec(queryType, sw.toString(), fieldNames);
}