Example usage for org.json.simple JSONObject putAll

List of usage examples for org.json.simple JSONObject putAll

Introduction

This page lists usage examples for org.json.simple JSONObject putAll.

Prototype

void putAll(Map<? extends K, ? extends V> m);

Source Link

Document

Copies all of the mappings from the specified map to this map (optional operation).
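
Because JSONObject is declared as a raw java.util.HashMap, putAll behaves exactly like the inherited Map operation: every entry of the argument map is copied into the JSONObject, and callers typically need @SuppressWarnings("unchecked"), as in the examples below. A minimal, self-contained sketch (the keys and values are illustrative only, not taken from the examples on this page):

import java.util.HashMap;
import java.util.Map;

import org.json.simple.JSONObject;

public class PutAllExample {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        // An ordinary map, e.g. as returned by a metrics or parsing library.
        Map<String, Object> source = new HashMap<>();
        source.put("user", 12.5);
        source.put("sys", 3.2);

        // Copy every mapping from the source map into the JSONObject.
        JSONObject json = new JSONObject();
        json.putAll(source);

        // Individual entries can still be added or overridden afterwards.
        json.put("timestamp", System.currentTimeMillis());

        System.out.println(json.toJSONString());
    }
}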

Usage

From source file:org.apache.hadoop.chukwa.datacollection.adaptor.sigar.SigarRunner.java

@SuppressWarnings("unchecked")
@Override
public void run() {
    boolean skip = false;
    CpuInfo[] cpuinfo = null;
    CpuPerc[] cpuPerc = null;
    Mem mem = null;
    Swap swap = null;
    FileSystem[] fs = null;
    String[] netIf = null;
    Uptime uptime = null;
    double[] loadavg = null;
    JSONObject json = new JSONObject();
    try {
        // CPU utilization
        JSONArray load = new JSONArray();
        try {
            cpuinfo = sigar.getCpuInfoList();
            cpuPerc = sigar.getCpuPercList();
            JSONArray cpuList = new JSONArray();
            for (int i = 0; i < cpuinfo.length; i++) {
                JSONObject cpuMap = new JSONObject();
                cpuMap.putAll(cpuinfo[i].toMap());
                cpuMap.put("combined", cpuPerc[i].getCombined() * 100);
                cpuMap.put("user", cpuPerc[i].getUser() * 100);
                cpuMap.put("sys", cpuPerc[i].getSys() * 100);
                cpuMap.put("idle", cpuPerc[i].getIdle() * 100);
                cpuMap.put("wait", cpuPerc[i].getWait() * 100);
                cpuMap.put("nice", cpuPerc[i].getNice() * 100);
                cpuMap.put("irq", cpuPerc[i].getIrq() * 100);
                cpuList.add(cpuMap);
            }
            sigar.getCpuPerc();
            json.put("cpu", cpuList);

            // Uptime
            uptime = sigar.getUptime();
            json.put("uptime", uptime.getUptime());

            // Load Average
            loadavg = sigar.getLoadAverage();
            load.add(loadavg[0]);
            load.add(loadavg[1]);
            load.add(loadavg[2]);
        } catch (SigarException se) {
            log.error("SigarException caused during collection of CPU utilization");
            log.error(ExceptionUtils.getStackTrace(se));
        } finally {
            json.put("loadavg", load);
        }

        // Memory Utilization
        JSONObject memMap = new JSONObject();
        JSONObject swapMap = new JSONObject();
        try {
            mem = sigar.getMem();
            memMap.putAll(mem.toMap());

            // Swap Utilization
            swap = sigar.getSwap();
            swapMap.putAll(swap.toMap());
        } catch (SigarException se) {
            log.error("SigarException caused during collection of Memory utilization");
            log.error(ExceptionUtils.getStackTrace(se));
        } finally {
            json.put("memory", memMap);
            json.put("swap", swapMap);
        }

        // Network Utilization
        JSONArray netInterfaces = new JSONArray();
        try {
            netIf = sigar.getNetInterfaceList();
            for (int i = 0; i < netIf.length; i++) {
                NetInterfaceStat net = new NetInterfaceStat();
                try {
                    net = sigar.getNetInterfaceStat(netIf[i]);
                } catch (SigarException e) {
                    // Ignore the exception when trying to stat network interface
                    log.warn("SigarException trying to stat network device " + netIf[i]);
                    continue;
                }
                JSONObject netMap = new JSONObject();
                netMap.putAll(net.toMap());
                if (previousNetworkStats.containsKey(netIf[i])) {
                    JSONObject deltaMap = previousNetworkStats.get(netIf[i]);
                    deltaMap.put("RxBytes", Long.parseLong(netMap.get("RxBytes").toString())
                            - Long.parseLong(deltaMap.get("RxBytes").toString()));
                    deltaMap.put("RxDropped", Long.parseLong(netMap.get("RxDropped").toString())
                            - Long.parseLong(deltaMap.get("RxDropped").toString()));
                    deltaMap.put("RxErrors", Long.parseLong(netMap.get("RxErrors").toString())
                            - Long.parseLong(deltaMap.get("RxErrors").toString()));
                    deltaMap.put("RxPackets", Long.parseLong(netMap.get("RxPackets").toString())
                            - Long.parseLong(deltaMap.get("RxPackets").toString()));
                    deltaMap.put("TxBytes", Long.parseLong(netMap.get("TxBytes").toString())
                            - Long.parseLong(deltaMap.get("TxBytes").toString()));
                    deltaMap.put("TxCollisions", Long.parseLong(netMap.get("TxCollisions").toString())
                            - Long.parseLong(deltaMap.get("TxCollisions").toString()));
                    deltaMap.put("TxErrors", Long.parseLong(netMap.get("TxErrors").toString())
                            - Long.parseLong(deltaMap.get("TxErrors").toString()));
                    deltaMap.put("TxPackets", Long.parseLong(netMap.get("TxPackets").toString())
                            - Long.parseLong(deltaMap.get("TxPackets").toString()));
                    netInterfaces.add(deltaMap);
                    skip = false;
                } else {
                    netInterfaces.add(netMap);
                    skip = true;
                }
                previousNetworkStats.put(netIf[i], netMap);
            }
        } catch (SigarException se) {
            log.error("SigarException caused during collection of Network utilization");
            log.error(ExceptionUtils.getStackTrace(se));
        } finally {
            json.put("network", netInterfaces);
        }

        // Filesystem Utilization
        JSONArray fsList = new JSONArray();
        try {
            fs = sigar.getFileSystemList();
            for (int i = 0; i < fs.length; i++) {
                FileSystemUsage usage = sigar.getFileSystemUsage(fs[i].getDirName());
                JSONObject fsMap = new JSONObject();
                fsMap.putAll(fs[i].toMap());
                fsMap.put("ReadBytes", usage.getDiskReadBytes());
                fsMap.put("Reads", usage.getDiskReads());
                fsMap.put("WriteBytes", usage.getDiskWriteBytes());
                fsMap.put("Writes", usage.getDiskWrites());
                if (previousDiskStats.containsKey(fs[i].getDevName())) {
                    JSONObject deltaMap = previousDiskStats.get(fs[i].getDevName());
                    deltaMap.put("ReadBytes", usage.getDiskReadBytes() - (Long) deltaMap.get("ReadBytes"));
                    deltaMap.put("Reads", usage.getDiskReads() - (Long) deltaMap.get("Reads"));
                    deltaMap.put("WriteBytes", usage.getDiskWriteBytes() - (Long) deltaMap.get("WriteBytes"));
                    deltaMap.put("Writes", usage.getDiskWrites() - (Long) deltaMap.get("Writes"));
                    deltaMap.put("Total", usage.getTotal());
                    deltaMap.put("Used", usage.getUsed());
                    deltaMap.putAll(fs[i].toMap());
                    fsList.add(deltaMap);
                    skip = false;
                } else {
                    fsList.add(fsMap);
                    skip = true;
                }
                previousDiskStats.put(fs[i].getDevName(), fsMap);
            }
        } catch (SigarException se) {
            log.error("SigarException caused during collection of FileSystem utilization");
            log.error(ExceptionUtils.getStackTrace(se));
        } finally {
            json.put("disk", fsList);
        }
        json.put("timestamp", System.currentTimeMillis());
        byte[] data = json.toString().getBytes(Charset.forName("UTF-8"));
        sendOffset += data.length;
        ChunkImpl c = new ChunkImpl("SystemMetrics", "Sigar", sendOffset, data, systemMetrics);
        if (!skip) {
            receiver.add(c);
        }
    } catch (InterruptedException se) {
        log.error(ExceptionUtil.getStackTrace(se));
    }
}

From source file:org.apache.metron.alerts.TelemetryAlertsBolt.java

@SuppressWarnings("unchecked")
public void execute(Tuple tuple) {

    LOG.trace("[Metron] Starting to process message for alerts");
    JSONObject original_message = null;
    String key = null;

    try {

        key = tuple.getStringByField("key");
        original_message = (JSONObject) tuple.getValueByField("message");

        if (original_message == null || original_message.isEmpty())
            throw new Exception("Could not parse message from byte stream");

        if (key == null)
            throw new Exception("Key is not valid");

        LOG.trace("[Metron] Received tuple: " + original_message);

        JSONObject alerts_tag = new JSONObject();
        Map<String, JSONObject> alerts_list = _adapter.alert(original_message);
        JSONArray uuid_list = new JSONArray();

        if (alerts_list == null || alerts_list.isEmpty()) {
            System.out.println("[Metron] No alerts detected in: " + original_message);
            _collector.ack(tuple);
            _collector.emit("message", new Values(key, original_message));
        } else {
            for (String alert : alerts_list.keySet()) {
                uuid_list.add(alert);

                LOG.trace("[Metron] Checking alerts cache: " + alert);

                if (cache.getIfPresent(alert) == null) {
                    System.out.println("[Metron]: Alert not found in cache: " + alert);

                    JSONObject global_alert = new JSONObject();
                    global_alert.putAll(_identifier);
                    global_alert.putAll(alerts_list.get(alert));
                    global_alert.put("timestamp", System.currentTimeMillis());
                    _collector.emit("alert", new Values(global_alert));

                    cache.put(alert, "");

                } else
                    LOG.trace("Alert located in cache: " + alert);

                LOG.debug("[Metron] Alerts are: " + alerts_list);

                if (original_message.containsKey("alerts")) {
                    JSONArray already_triggered = (JSONArray) original_message.get("alerts");

                    uuid_list.addAll(already_triggered);
                    LOG.trace("[Metron] Messages already had alerts...tagging more");
                }

                original_message.put("alerts", uuid_list);

                LOG.debug("[Metron] Detected alerts: " + alerts_tag);

                _collector.ack(tuple);
                _collector.emit("message", new Values(key, original_message));

            }

            /*
             * if (metricConfiguration != null) { emitCounter.inc();
             * ackCounter.inc(); }
             */
        }

    } catch (Exception e) {
        e.printStackTrace();
        LOG.error("Failed to tag message :" + original_message);
        e.printStackTrace();
        _collector.fail(tuple);

        /*
         * if (metricConfiguration != null) { failCounter.inc(); }
         */

        JSONObject error = ErrorUtils.generateErrorMessage("Alerts problem: " + original_message, e);
        _collector.emit("error", new Values(error));
    }
}

From source file:org.apache.metron.parsers.asa.GrokAsaParser.java

@Override
public List<JSONObject> parse(byte[] raw_message) {

    String toParse = "";
    JSONObject toReturn;
    List<JSONObject> messages = new ArrayList<>();
    try {

        toParse = new String(raw_message, "UTF-8");

        System.out.println("Received message: " + toParse);

        Match gm = grok.match(toParse);
        gm.captures();

        toReturn = new JSONObject();

        toReturn.putAll(gm.toMap());

        String str = toReturn.get("ciscotag").toString();
        String pattern = patternMap.get(str);

        Map<String, Object> response = getMap(pattern, toParse);

        toReturn.putAll(response);

        //System.out.println("*******I MAPPED: " + toReturn);
        long timestamp = convertToEpoch(toReturn.get("MONTH").toString(), toReturn.get("MONTHDAY").toString(),
                toReturn.get("TIME").toString(), true);
        toReturn.put("timestamp", timestamp);

        toReturn.remove("MONTHDAY");
        toReturn.remove("TIME");
        toReturn.remove("MINUTE");
        toReturn.remove("HOUR");
        toReturn.remove("YEAR");
        toReturn.remove("SECOND");

        toReturn.put("ip_src_addr", toReturn.remove("IPORHOST"));
        toReturn.put("original_string", toParse);
        messages.add(toReturn);
        return messages;

    } catch (Exception e) {
        e.printStackTrace();
        return null;
    }

}

From source file:org.apache.metron.parsers.GrokParser.java

@SuppressWarnings("unchecked")
private Optional<MessageParserResult<JSONObject>> parseMultiLine(byte[] rawMessage) {
    List<JSONObject> messages = new ArrayList<>();
    Map<Object, Throwable> errors = new HashMap<>();
    String originalMessage = null;
    // read the incoming raw data as if it may have multiple lines of logs
    // if there is only one line, it will just get processed.
    try (BufferedReader reader = new BufferedReader(
            new StringReader(new String(rawMessage, StandardCharsets.UTF_8)))) {
        while ((originalMessage = reader.readLine()) != null) {
            LOG.debug("Grok parser parsing message: {}", originalMessage);
            try {
                Match gm = grok.match(originalMessage);
                gm.captures();
                JSONObject message = new JSONObject();
                message.putAll(gm.toMap());

                if (message.size() == 0) {
                    Throwable rte = new RuntimeException(
                            "Grok statement produced a null message. Original message was: " + originalMessage
                                    + " and the parsed message was: " + message + " . Check the pattern at: "
                                    + grokPath);
                    errors.put(originalMessage, rte);
                    continue;
                }
                message.put("original_string", originalMessage);
                for (String timeField : timeFields) {
                    String fieldValue = (String) message.get(timeField);
                    if (fieldValue != null) {
                        message.put(timeField, toEpoch(fieldValue));
                    }
                }
                if (timestampField != null) {
                    message.put(Constants.Fields.TIMESTAMP.getName(),
                            formatTimestamp(message.get(timestampField)));
                }
                message.remove(patternLabel);
                postParse(message);
                messages.add(message);
                LOG.debug("Grok parser parsed message: {}", message);
            } catch (Exception e) {
                LOG.error(e.getMessage(), e);
                errors.put(originalMessage, e);
            }
        }
    } catch (IOException e) {
        LOG.error(e.getMessage(), e);
        Exception innerException = new IllegalStateException(
                "Grok parser Error: " + e.getMessage() + " on " + originalMessage, e);
        return Optional.of(new DefaultMessageParserResult<>(innerException));
    }
    return Optional.of(new DefaultMessageParserResult<>(messages, errors));
}

From source file:org.apache.metron.parsers.GrokParser.java

@SuppressWarnings("unchecked")
private Optional<MessageParserResult<JSONObject>> parseSingleLine(byte[] rawMessage) {
    List<JSONObject> messages = new ArrayList<>();
    Map<Object, Throwable> errors = new HashMap<>();
    String originalMessage = null;
    try {
        originalMessage = new String(rawMessage, "UTF-8");
        LOG.debug("Grok parser parsing message: {}", originalMessage);
        Match gm = grok.match(originalMessage);
        gm.captures();
        JSONObject message = new JSONObject();
        message.putAll(gm.toMap());

        if (message.size() == 0) {
            Throwable rte = new RuntimeException(
                    "Grok statement produced a null message. Original message was: " + originalMessage
                            + " and the parsed message was: " + message + " . Check the pattern at: "
                            + grokPath);
            errors.put(originalMessage, rte);
        } else {
            message.put("original_string", originalMessage);
            for (String timeField : timeFields) {
                String fieldValue = (String) message.get(timeField);
                if (fieldValue != null) {
                    message.put(timeField, toEpoch(fieldValue));
                }
            }
            if (timestampField != null) {
                message.put(Constants.Fields.TIMESTAMP.getName(), formatTimestamp(message.get(timestampField)));
            }
            message.remove(patternLabel);
            postParse(message);
            messages.add(message);
            LOG.debug("Grok parser parsed message: {}", message);
        }
    } catch (Exception e) {
        LOG.error(e.getMessage(), e);
        Exception innerException = new IllegalStateException(
                "Grok parser Error: " + e.getMessage() + " on " + originalMessage, e);
        return Optional.of(new DefaultMessageParserResult<>(innerException));
    }
    return Optional.of(new DefaultMessageParserResult<JSONObject>(messages, errors));
}

From source file:org.apache.metron.parsers.regex.RegularExpressionsParser.java

/**
 * Parses an unstructured text message into a json object based upon the regular expression
 * configuration supplied.
 *
 * @param rawMessage incoming unstructured raw text.
 * @return List of parsed JSON objects. In this case the list will have a single element only.
 */
@Override
public List<JSONObject> parse(byte[] rawMessage) {
    String originalMessage = null;
    try {
        originalMessage = new String(rawMessage, UTF_8).trim();
        LOG.debug(" raw message. {}", originalMessage);
        if (originalMessage.isEmpty()) {
            LOG.warn("Message is empty.");
            return Arrays.asList(new JSONObject());
        }
    } catch (Exception e) {
        LOG.error("[Metron] Could not read raw message. {} " + originalMessage, e);
        throw new RuntimeException(e.getMessage(), e);
    }

    JSONObject parsedJson = new JSONObject();
    if (messageHeaderPatternsMap.size() > 0) {
        parsedJson.putAll(extractHeaderFields(originalMessage));
    }
    parsedJson.putAll(parse(originalMessage));
    parsedJson.put(Constants.Fields.ORIGINAL.getName(), originalMessage);
    /**
     * Populate the output json with default timestamp.
     */
    parsedJson.put(Constants.Fields.TIMESTAMP.getName(), System.currentTimeMillis());
    applyFieldTransformations(parsedJson);
    return Arrays.asList(parsedJson);
}

From source file:org.apache.metron.parsing.parsers.BasicYafParser.java

private List<JSONObject> parseWithGrok(byte[] msg) {
    _LOG.trace("[Metron] Starting to parse incoming message with grok");
    JSONObject jsonMessage = new JSONObject();
    List<JSONObject> messages = new ArrayList<>();
    try {
        String rawMessage = new String(msg, "UTF-8");

        Match gm = grok.match(rawMessage);
        gm.captures();
        Map grokMap = gm.toMap();
        jsonMessage.putAll(gm.toMap());

        jsonMessage.put("original_string", rawMessage);
        String startTime = (String) grokMap.get("start_time");
        long timestamp = 0L;
        if (startTime != null) {
            timestamp = toEpoch(startTime);
            jsonMessage.put("timestamp", timestamp);
        } else {
            jsonMessage.put("timestamp", "0");
        }
        String endTime = (String) grokMap.get("end_time");
        if (endTime != null) {
            jsonMessage.put("end_time", toEpoch(endTime));
        } else {
            jsonMessage.put("end_time", "0");
        }
        jsonMessage.remove("YAF_DELIMITED");
        jsonMessage.remove("start_time");
        messages.add(jsonMessage);
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    }
    return messages;
}

From source file:org.apache.oozie.servlet.BaseAdminServlet.java

/**
 * Return safemode state, instrumentation, configuration, osEnv or
 * javaSysProps
 */
@Override
@SuppressWarnings("unchecked")
public void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    String resource = getResourceName(request);
    Instrumentation instr = Services.get().get(InstrumentationService.class).get();

    if (resource.equals(RestConstants.ADMIN_STATUS_RESOURCE)) {
        JSONObject json = new JSONObject();
        populateOozieMode(json);
        // json.put(JsonTags.SYSTEM_SAFE_MODE, getOozeMode());
        sendJsonResponse(response, HttpServletResponse.SC_OK, json);
    } else if (resource.equals(RestConstants.ADMIN_OS_ENV_RESOURCE)) {
        JSONObject json = new JSONObject();
        json.putAll(instr.getOSEnv());
        sendJsonResponse(response, HttpServletResponse.SC_OK, json);
    } else if (resource.equals(RestConstants.ADMIN_JAVA_SYS_PROPS_RESOURCE)) {
        JSONObject json = new JSONObject();
        json.putAll(instr.getJavaSystemProperties());
        sendJsonResponse(response, HttpServletResponse.SC_OK, json);
    } else if (resource.equals(RestConstants.ADMIN_CONFIG_RESOURCE)) {
        JSONObject json = new JSONObject();
        json.putAll(instr.getConfiguration());
        sendJsonResponse(response, HttpServletResponse.SC_OK, json);
    } else if (resource.equals(RestConstants.ADMIN_INSTRUMENTATION_RESOURCE)) {
        sendInstrumentationResponse(response, instr);
    } else if (resource.equals(RestConstants.ADMIN_BUILD_VERSION_RESOURCE)) {
        JSONObject json = new JSONObject();
        json.put(JsonTags.BUILD_VERSION, BuildInfo.getBuildInfo().getProperty(BuildInfo.BUILD_VERSION));
        sendJsonResponse(response, HttpServletResponse.SC_OK, json);
    } else if (resource.equals(RestConstants.ADMIN_QUEUE_DUMP_RESOURCE)) {
        JSONObject json = new JSONObject();
        getQueueDump(json);
        sendJsonResponse(response, HttpServletResponse.SC_OK, json);
    } else if (resource.equals(RestConstants.ADMIN_TIME_ZONES_RESOURCE)) {
        JSONObject json = new JSONObject();
        json.put(JsonTags.AVAILABLE_TIME_ZONES, availableTimeZonesToJsonArray());
        sendJsonResponse(response, HttpServletResponse.SC_OK, json);
    } else if (resource.equals(RestConstants.ADMIN_JMS_INFO)) {
        String timeZoneId = request.getParameter(RestConstants.TIME_ZONE_PARAM) == null ? "GMT"
                : request.getParameter(RestConstants.TIME_ZONE_PARAM);
        JsonBean jmsBean = getJMSConnectionInfo(request, response);
        sendJsonResponse(response, HttpServletResponse.SC_OK, jmsBean, timeZoneId);
    } else if (resource.equals(RestConstants.ADMIN_AVAILABLE_OOZIE_SERVERS_RESOURCE)) {
        JSONObject json = new JSONObject();
        json.putAll(getOozieURLs());
        sendJsonResponse(response, HttpServletResponse.SC_OK, json);
    } else if (resource.equals(RestConstants.ADMIN_UPDATE_SHARELIB)) {
        authorizeRequest(request);
        updateShareLib(request, response);
    } else if (resource.equals(RestConstants.ADMIN_LIST_SHARELIB)) {
        String sharelibKey = request.getParameter(RestConstants.SHARE_LIB_REQUEST_KEY);
        sendJsonResponse(response, HttpServletResponse.SC_OK, getShareLib(sharelibKey));
    } else if (resource.equals(RestConstants.ADMIN_METRICS_RESOURCE)) {
        sendMetricsResponse(response);
    }
}

From source file:org.apache.oozie.servlet.BaseAdminServlet.java

@SuppressWarnings("unchecked")
private JSONObject updateLocalShareLib(HttpServletRequest request) {
    ShareLibService shareLibService = Services.get().get(ShareLibService.class);
    JSONObject json = new JSONObject();
    json.put(JsonTags.SHARELIB_UPDATE_HOST, ConfigUtils.getOozieEffectiveUrl());
    try {
        json.putAll(shareLibService.updateShareLib());
        json.put(JsonTags.SHARELIB_UPDATE_STATUS, "Successful");
    } catch (Exception e) {
        json.put(JsonTags.SHARELIB_UPDATE_STATUS, e.getClass().getName() + ": " + e.getMessage());
    }
    return json;
}

From source file:org.apache.sqoop.connector.idf.JSONIntermediateDataFormat.java

@SuppressWarnings("unchecked")
private JSONObject toJSON(Object[] objectArray) {

    if (objectArray == null) {
        return null;
    }
    Column[] columns = schema.getColumnsArray();

    if (objectArray.length != columns.length) {
        throw new SqoopException(IntermediateDataFormatError.INTERMEDIATE_DATA_FORMAT_0001,
                "The data " + objectArray.toString() + " has the wrong number of fields.");
    }
    JSONObject json = new JSONObject();
    for (int i = 0; i < objectArray.length; i++) {
        if (objectArray[i] == null && !columns[i].isNullable()) {
            throw new SqoopException(IntermediateDataFormatError.INTERMEDIATE_DATA_FORMAT_0005,
                    columns[i].getName() + " does not support null values");
        }
        if (objectArray[i] == null) {
            json.put(columns[i].getName(), null);
            continue;
        }
        switch (columns[i].getType()) {
        case ARRAY:
        case SET:
            // store as JSON array
            Object[] objArray = (Object[]) objectArray[i];
            JSONArray jsonArray = toJSONArray(objArray);
            json.put(columns[i].getName(), jsonArray);
            break;
        case MAP:
            // store as JSON object
            Map<Object, Object> map = (Map<Object, Object>) objectArray[i];
            JSONObject jsonObject = new JSONObject();
            jsonObject.putAll(map);
            json.put(columns[i].getName(), jsonObject);
            break;
        case ENUM:
        case TEXT:
            json.put(columns[i].getName(), objectArray[i]);
            break;
        case BINARY:
        case UNKNOWN:
            json.put(columns[i].getName(), Base64.encodeBase64String((byte[]) objectArray[i]));
            break;
        case FIXED_POINT:
        case FLOATING_POINT:
        case DECIMAL:
            // store as a plain object
            json.put(columns[i].getName(), objectArray[i]);
            break;
        // stored in JSON in the same format as CSV strings, i.e. the Joda time format
        case DATE_TIME:
            json.put(columns[i].getName(), removeQuotes(toCSVDateTime(objectArray[i], columns[i])));
            break;
        case TIME:
            json.put(columns[i].getName(), removeQuotes(toCSVTime(objectArray[i], columns[i])));
            break;
        case DATE:
            json.put(columns[i].getName(), removeQuotes(toCSVDate(objectArray[i])));
            break;
        case BIT:
            json.put(columns[i].getName(), Boolean.valueOf(toCSVBit(objectArray[i])));
            break;
        default:
            throw new SqoopException(IntermediateDataFormatError.INTERMEDIATE_DATA_FORMAT_0001,
                    "Column type from schema was not recognized for " + columns[i].getType());
        }
    }

    return json;
}