Usage example for org.json.simple.parser.JSONParser#parse
public void parse(Reader in, ContentHandler contentHandler, boolean isResume) throws IOException, ParseException
From source file: com.dynamobi.db.conn.couchdb.CouchUdx.java
/** * Called by a custom LucidDB function for each view. * @param userName - CouchDB user name//w ww . j a va2s . c om * @param pw - CouchDB password * @param url - CouchDB REST URL * @param view - CouchDB REST view -- concatenated on the end of URL with * a slash prefix if necessary. * @param limit - Limit parameter passed to couchdb * @param reduce - if false, we pass &reduce=false to the view. * @param groupLevel - sent to view for group reduction, default 'EXACT' * Possible values: 'EXACT', sends &group=true. * 'NONE': sends &group=false. * 1-N: sends &group_level=x to the view or summarizer. 1 says to group * on the first index of the array key, 2 says the first two indexes, * N all indexes (equivalent to 'EXACT'). * * (the following should be done in logic rewrite rule?) TODO: * 'CALCULATE': typically set by the pushdown optimizer, instructs * this udx to best-guess what group level we can/should push down. * The basic idea is that if the columns in a GROUP BY statement belong * to objects defined as elements of the key array for the first key-value * pair returned by a view, we will push down the number of columns being * grouped by and ignore the grouping on the LucidDB end. Otherwise all * group by's will still be done by LucidDB. * @param resultInserter - Table for inserting results. Assumed to have the * necessary column names in the order we get them. */ public static void query(String userName, String pw, String url, String view, String limit, boolean reduce, String groupLevel, boolean outputJson, PreparedStatement resultInserter) throws SQLException { // Specialize so we can column names for our resultInserter // instead of assuming an order. 
ParameterMetaData pmd = resultInserter.getParameterMetaData(); FarragoParameterMetaData fpmd = (FarragoParameterMetaData) pmd; int paramCount = fpmd.getParameterCount(); String[] paramNames = new String[paramCount]; for (int i = 0; i < paramCount; i++) { paramNames[i] = fpmd.getFieldName(i + 1); // JDBC offset } RowProducer producer = new RowProducer(); JSONParser parser = new JSONParser(); InputStreamReader in = getViewStream(userName, pw, url, view, limit, reduce, groupLevel, true); while (!producer.isDone()) { try { parser.parse(in, producer, true); } catch (Throwable e) { // IOException, ParseException throw new SQLException(e); } if (!producer.getKey().equals("key")) continue; Object key = producer.getValue(); try { parser.parse(in, producer, true); } catch (Throwable e) { // IOException, ParseException throw new SQLException(e); } assert (producer.getKey().equals("value")); Object value = producer.getValue(); if (outputJson) { // put key in first col, val in second col, escape. resultInserter.setString(1, key.toString()); resultInserter.setString(2, value.toString()); resultInserter.executeUpdate(); continue; } Map<String, Object> params = new HashMap<String, Object>(paramNames.length); int dupes = mergeParams(params, key, "KEY"); dupes += mergeParams(params, value, "VALUE"); if (params.size() - dupes != paramNames.length) { // We have more params than columns.. throw new SQLException("Read " + params.size() + " params and " + paramNames.length + " columns, which need to match. Did you " + "add column(s) for both the key and value?"); } for (int c = 0; c < paramNames.length; c++) { Object o = params.get(paramNames[c]); if (o != null) { resultInserter.setObject(c + 1, o); } } resultInserter.executeUpdate(); } }