Example usage for org.json.simple JSONObject keySet

List of usage examples for org.json.simple JSONObject keySet

Introduction

On this page you can find example usage for org.json.simple JSONObject keySet.

Prototype

Set<K> keySet();

Source Link

Document

Returns a Set view of the keys contained in this map.

Usage

From source file:blog.cobablog.java

/**
 * Web service operation/*from w w w .j av  a  2 s . c o m*/
 * @param idPost
 * @return 
 */
@WebMethod(operationName = "listComment")
public List<Komentar> listComment(@WebParam(name = "idPost") String idPost) {
    ArrayList<Komentar> out = new ArrayList<>();
    try {
        //TODO write your implementation code here:
        String linkString = LINK_FIREBASE + "posts/" + idPost + "/komentar.json";
        URL link = new URL(linkString);
        BufferedReader reader = new BufferedReader(new InputStreamReader(link.openStream()));

        String s = "";
        String tmp;
        while ((tmp = reader.readLine()) != null) {
            s += tmp;
        }

        JSONParser parser = new JSONParser();
        JSONObject o = (JSONObject) parser.parse(s);

        int i;
        for (i = 0; i < o.size(); i++) {
            Komentar k = new Komentar();
            k.setId(o.keySet().toArray()[i].toString());
            JSONObject postEntry = (JSONObject) parser.parse(o.get(k.getId()).toString());
            k.setEmail((String) postEntry.get("email"));
            k.setKonten((String) postEntry.get("konten"));
            k.setNama((String) postEntry.get("nama"));
            k.setTanggal((String) postEntry.get("tanggal"));

            out.add(k);

        }

        return out;
        //System.out.println(array.get(0));
    } catch (MalformedURLException ex) {
        Logger.getLogger(cobablog.class.getName()).log(Level.SEVERE, null, ex);
    } catch (IOException ex) {
        Logger.getLogger(cobablog.class.getName()).log(Level.SEVERE, null, ex);
    } catch (ParseException ex) {
        Logger.getLogger(cobablog.class.getName()).log(Level.SEVERE, null, ex);
    }
    return out;
}

From source file:blog.cobablog.java

/**
 * Web service operation/*www . j  a va 2 s  .  c  om*/
 * @return 
 */
@WebMethod(operationName = "listUser")
public List<Pengguna> listUser() {
    //TODO write your implementation code here:
    ArrayList<Pengguna> out = new ArrayList<>();
    try {
        //TODO write your implementation code here:
        String linkString = LINK_FIREBASE + "/users.json";
        URL link = new URL(linkString);
        BufferedReader reader = new BufferedReader(new InputStreamReader(link.openStream()));

        String s = "";
        String tmp;
        while ((tmp = reader.readLine()) != null) {
            s += tmp;
        }

        JSONParser parser = new JSONParser();
        JSONObject o = (JSONObject) parser.parse(s);

        int i;
        for (i = 0; i < o.size(); i++) {
            Pengguna p = new Pengguna();
            p.setUsername(o.keySet().toArray()[i].toString());
            JSONObject postEntry = (JSONObject) parser.parse(o.get(p.getUsername()).toString());
            p.setEmail((String) postEntry.get("email"));
            p.setNama((String) postEntry.get("nama"));
            p.setPassword((String) postEntry.get("password"));
            p.setRole((String) postEntry.get("role"));
            out.add(p);
        }

        return out;
        //System.out.println(array.get(0));
    } catch (MalformedURLException ex) {
        Logger.getLogger(cobablog.class.getName()).log(Level.SEVERE, null, ex);
    } catch (IOException ex) {
        Logger.getLogger(cobablog.class.getName()).log(Level.SEVERE, null, ex);
    } catch (ParseException ex) {
        Logger.getLogger(cobablog.class.getName()).log(Level.SEVERE, null, ex);
    }
    return out;
}

From source file:com.datastax.loader.CqlDelimParser.java

/**
 * Parses a single JSON line into a data row ordered by {@code columnNames}.
 *
 * @param line one JSON document whose keys are column names
 * @return the parsed values (via {@code parse(row)}), or null when the line
 *         is not valid JSON or contains fields that are not known columns
 */
@SuppressWarnings("unchecked")
public List<Object> parseJson(String line) {
    JSONObject jsonObject = null;
    try {
        jsonObject = (JSONObject) jsonParser.parse(line);
    } catch (org.json.simple.parser.ParseException e) {
        // BUG FIX: the original used %d for the String argument `line`,
        // which throws IllegalFormatConversionException at runtime
        // instead of printing the intended error message.
        System.err.println(String.format("Invalid format in input %s: %s", line, e.getMessage()));
        return null;
    }
    String[] row = new String[columnNames.size()];
    // Copy the key set: JSONObject.keySet() is a live view of the map, so
    // removing entries from it directly would mutate the parsed object.
    Set<String> fields = new java.util.HashSet<String>((Set<String>) jsonObject.keySet());
    for (int i = 0; i < columnNames.size(); i++) {
        String s = columnNames.get(i);
        Object o = jsonObject.get(s);
        row[i] = (null != o) ? o.toString() : null;
        fields.remove(s);
    }
    // Any field left over was not a known column; reject the whole line.
    if (0 != fields.size()) {
        for (String f : fields) {
            System.err.println("Unknown JSON field " + f);
        }
        return null;
    }
    return parse(row);
}

From source file:edu.ucsd.sbrg.escher.utilities.EscherParser.java

/**
 * Parses an Escher map from a JSON input stream into an EscherMap object
 * graph (canvas, nodes, reactions, text labels).
 *
 * @param inputStream  JSON source; expected to be a top-level array whose
 *                     first element is the map header and whose second
 *                     element contains canvas/nodes/reactions/text_labels
 * @param defaultMapId fallback identifier used when the JSON has no map_id
 * @return the populated EscherMap, or null if the top-level JSON element
 *         is not an array
 * @throws IOException if reading from the stream fails
 * @throws ParseException if the content is not well-formed JSON
 */
public EscherMap parse(InputStream inputStream, String defaultMapId) throws IOException, ParseException {
    // Read JSON file
    JSONParser parser = new JSONParser();
    Reader reader = new BufferedReader(new InputStreamReader(inputStream));
    Object obj = parser.parse(reader);
    reader.close();
    if (!(obj instanceof JSONArray)) {
        logger.warning(MessageFormat.format(bundle.getString("EscherParser.JSONObjectExpected"), obj,
                obj.getClass().getName(), JSONArray.class.getName()));
        return null;
    }
    JSONArray json = (JSONArray) obj;
    // Element 0 of the top-level array is the map header block.
    JSONObject map = (JSONObject) json.get(0);

    /*
     * Create the EscherMap object.
     */
    EscherMap escherMap = new EscherMap();
    Object id = map.get(EscherKeywords.map_id.name());
    // Only map_id is treated as optional; the remaining header fields are
    // dereferenced unconditionally and would throw NPE if absent.
    escherMap.setId(id != null ? id.toString() : defaultMapId);
    escherMap.setName(map.get(EscherKeywords.map_name.name()).toString());
    escherMap.setDescription(map.get(EscherKeywords.map_description.name()).toString());
    escherMap.setSchema(map.get(EscherKeywords.schema.name()).toString());
    escherMap.setURL(map.get(EscherKeywords.homepage.name()).toString());
    // Element 1 holds the actual map content.
    JSONObject parts = (JSONObject) json.get(1);
    Canvas canvas = parseCanvas((JSONObject) parts.get(EscherKeywords.canvas.name()));
    escherMap.setCanvas(canvas);

    /*
     * Nodes
     */
    JSONObject mapNode = (JSONObject) parts.get(EscherKeywords.nodes.name());
    if (mapNode != null) {
        for (Object object : mapNode.keySet()) {
            Node node = parseNode(object, (JSONObject) mapNode.get(object));
            escherMap.addNode(node);
            if (node.isSetCompartment()) {
                // Grow (or lazily create) the node's compartment so that
                // its bounding box covers every node assigned to it.
                try {
                    EscherCompartment compartment = escherMap.getCompartment(node.getCompartment());
                    double x = node.getX(); // - node.getWidth()/2d;
                    double y = node.getY(); // - node.getHeight()/2d;
                    if (compartment == null) {
                        compartment = new EscherCompartment();
                        compartment.setId(node.getCompartment());
                        compartment.setX(x);
                        compartment.setY(y);
                        compartment.setWidth(0d); //node.getWidth());
                        compartment.setHeight(0d); //node.getHeight());
                        escherMap.addCompartment(compartment);
                    } else {
                        if (x < compartment.getX()) {
                            compartment.setX(x);
                        } else if (x /*+ node.getWidth()*/
                        > compartment.getX() + compartment.getWidth()) {
                            // NOTE(review): this stores the absolute
                            // coordinate x as the width (not x - getX());
                            // looks like a latent bug -- confirm intended.
                            compartment.setWidth(x /* + node.getWidth()*/);
                        }
                        if (y < compartment.getY()) {
                            compartment.setY(y);
                        } else if (y /*+ node.getHeight()*/
                        > compartment.getY() + compartment.getHeight()) {
                            // NOTE(review): same concern as setWidth above.
                            compartment.setHeight(y /* + node.getHeight()*/);
                        }
                    }
                } catch (Throwable t) {
                    t.printStackTrace();
                }
            }
        }
    }

    /*
     * Reactions
     */
    JSONObject mapReactions = (JSONObject) parts.get(EscherKeywords.reactions.name());
    if (mapReactions != null) {
        for (Object object : mapReactions.keySet()) {
            for (EscherReaction reaction : parseReaction(object, (JSONObject) mapReactions.get(object),
                    escherMap)) {
                escherMap.addReaction(reaction);
            }
        }
    }

    /*
     * Labels
     */
    JSONObject mapText = (JSONObject) parts.get(EscherKeywords.text_labels.name());
    if (mapText != null) {
        for (Object object : mapText.keySet()) {
            escherMap.addTextLabel(parseTextLabel(object, (JSONObject) mapText.get(object)));
        }
    }
    return escherMap;
}

From source file:eu.riscoss.rdc.RDCGithub.java

/**
 * Converts a parsed GitHub repository JSON object into {@code RiskData}
 * measures keyed by indicator id in {@code values}.
 *
 * Only keys registered in the {@code keys} map are converted, according to
 * their declared type ("number", "boolean" or "date"). Two extra measures
 * are derived: a bare "size" indicator and "repository_age_years" computed
 * from "created_at". The "license" key is always mapped to a has_license
 * 0/1 indicator, even when its value is null.
 *
 * @param jv     parsed JSON; ignored unless it is a JSONObject
 * @param entity entity name the measures are attributed to
 * @param values output map, populated with one entry per measure id
 */
private void parseJsonRepo(JSONAware jv, String entity, Map<String, RiskData> values) {
    final long MILLISEC_YEAR = 365L * 24 * 3600 * 1000;
    if (!(jv instanceof JSONObject)) {
        return;
    }
    JSONObject jo = (JSONObject) jv;
    for (Object key : jo.keySet()) {
        // Hoisted: the original recomputed key.toString() and
        // keys.get(...) repeatedly inside each branch.
        String keyName = key.toString();
        Object rawValue = jo.get(key);
        // containsKey is the idiomatic form of keys.keySet().contains(...)
        if (keys.containsKey(keyName) && rawValue != null) {
            String value = rawValue.toString();
            String type = keys.get(keyName);

            if ("number".equals(type)) {
                try {
                    double d = Double.parseDouble(value);
                    RiskData rd = new RiskData(GITHUB_PREFIX + keyName, entity, new Date(),
                            RiskDataType.NUMBER, d);
                    values.put(rd.getId(), rd);

                    // hard-coded size value (published without prefix too)
                    if (keyName.equals("size")) {
                        rd = new RiskData("size", entity, new Date(), RiskDataType.NUMBER, d);
                        values.put(rd.getId(), rd);
                    }
                } catch (Exception ex) {
                    ex.printStackTrace();
                }
            } else if ("boolean".equals(type)) {
                try {
                    boolean b = Boolean.parseBoolean(value);
                    RiskData rd = new RiskData(GITHUB_PREFIX + keyName, entity, new Date(),
                            RiskDataType.NUMBER, (b ? 1 : 0));
                    values.put(rd.getId(), rd);
                } catch (Exception ex) {
                    ex.printStackTrace();
                }
            } else if ("date".equals(type)) {
                try {
                    // GitHub dates look like 2014-01-01T00:00:00Z; strip the
                    // ISO 'T' so SimpleDateFormat can parse it.
                    value = value.replaceAll("T", " ");
                    SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd H:m:s");
                    Date date = formatter.parse(value);
                    RiskData rd = new RiskData(GITHUB_PREFIX + keyName, entity, new Date(),
                            RiskDataType.NUMBER, date.getTime());
                    values.put(rd.getId(), rd);

                    // calculate also the repository age!
                    if (keyName.equals("created_at")) {
                        long datediff = new Date().getTime() - date.getTime();
                        double years = (double) datediff / MILLISEC_YEAR;
                        rd = new RiskData(GITHUB_PREFIX + "repository_age_years", entity, new Date(),
                                RiskDataType.NUMBER, years);
                        values.put(rd.getId(), rd);
                    }
                } catch (Exception ex) {
                    ex.printStackTrace();
                }
            }
            //object currently not implemented in the RDR
            //implementation: hardcoded, as boolean, adding "has_" (see below)
            //                     else if( "object".equals( keys.get( key.toString() ) ) ) {
            //                           RiskData rd = new RiskData( GITHUB_PREFIX + key.toString(), entity, new Date(), RiskDataType.NUMBER, 1 );
            //                           values.put( rd.getId(), rd );                     
            //                     }
        }
        // "license" is handled unconditionally: null maps to 0, else 1.
        if (keyName.equals("license")) {
            RiskData rd;
            if (jo.get(key) == null)
                rd = new RiskData(GITHUB_PREFIX + "has_license", entity, new Date(), RiskDataType.NUMBER,
                        0);
            else
                rd = new RiskData(GITHUB_PREFIX + "has_license", entity, new Date(), RiskDataType.NUMBER,
                        1);
            values.put(rd.getId(), rd);
        }

    }
}

From source file:it.uniud.ailab.dcore.annotation.annotators.WikipediaInferenceAnnotator.java

/**
 * Fills the hypernyms and related-link maps by querying Wikipedia once per
 * keyphrase. The hypernyms map accumulates the categories found for each
 * page, while the related map accumulates the page's outgoing links.
 *
 * NOTE(review): the method name contains a typo ("Hyperyms"); kept as-is
 * to preserve the existing interface.
 *
 * @param grams the grams to analyze.
 * @throws AnnotationException if a gram lacks a Wikipedia URI annotation,
 *         the URL is malformed, or the request/JSON parsing fails
 */
private void findHyperymsAndRelated(List<Keyphrase> grams) {

    HttpURLConnection con = null;
    BufferedReader reader = null;

    // We may pipe several article titles in one query, but for some awkward reason,
    // the API won't give us the full category list of the requested terms, nor the full definition
    for (Keyphrase currentGram : grams) {

        // this will contain the categories (i.e. our hypernyms)s
        ArrayList<String> wikiCategories = new ArrayList<>();

        // this will contain the related links
        ArrayList<String> wikiLinks = new ArrayList<>();

        String page = ((UriAnnotation) currentGram.getAnnotation(WIKIURI)).getUriTitle();

        /*
        // get the correct annotation that generated the wikiflag
        TextAnnotation a = (TextAnnotation) currentGram.getTokens().get(0).
            getAnnotation(WIKIFLAG);
                
        // the annotations have the same length, so we may have a legit
        // wikipedia surface as the gram
                
        if (a.getTokens().length == currentGram.getTokens().size()) {
                
        boolean isTagged = true;
                
        for (int i = 0; i < a.getTokens().length && isTagged; i++) {
            isTagged = a.getTokens()[i].equals(
                    currentGram.getTokens().get(i));
        }
                
        if (isTagged) {
            page = a.getAnnotation();
        }
        }*/

        if (page == null)
            throw new AnnotationException(this, "I couldn't find the correct annotation.");

        // Wikipedia page titles use underscores instead of spaces.
        page = page.replaceAll(" ", "_");

        // do the query and save the retrieved json in an object.
        String queryAddress = String.format("https://%s.%s%s", componentLocale.getLanguage(), wikipediaQuery,
                page);

        try {

            con = (HttpURLConnection) (new URL(queryAddress)).openConnection();
            con.setRequestProperty("User-Agent", userAgent);
            con.setRequestMethod("GET");
            reader = new BufferedReader(new InputStreamReader(con.getInputStream()));
            Object json = (new JSONParser()).parse(reader);
            // closing connection
            // NOTE(review): disconnect() is only reached on the success
            // path; on exception the connection is left to the pool/GC.
            con.disconnect();
            // The retrieved JSON is something like:
            //
            // "query": {
            //        "pages": {
            //            "<PAGE ID NUMBER>": {
            //                "pageid": "<PAGE ID NUMBER>",
            //                "ns": 0,
            //                "title": "<PAGE TITLE>",
            //                "categories": [
            //                    {
            //                        "ns": 14,
            //                        "title": "Category:<CATEGORY 1>"
            //                    },
            //                    {
            //                        "ns": 14,
            //                        "title": "Category:<CATEGORY 2>"
            //                    },
            //                    {
            //                        "ns": 14,
            //                        "title": "Category:<CATEGORY 3>"
            //                    }
            //                ],
            //                "extract":"<TEXT>",
            //                "links": [
            //                    {
            //                        "ns": 0,
            //                        "title": "<LINK 1>"
            //                    },
            //                    {
            //                        "ns": 0,
            //                        "title": "<LINK 2>"
            //                    },
            //                    {
            //                        "ns": 0,
            //                        "title": "<LINK 3>"
            //                    }
            //                 ]
            //            }
            //        }
            //    }
            //}
            // note that NOT ALL the wikis have the "extract" property in the API
            // therefore we may not assume that it will always be there
            JSONObject queryblock = (JSONObject) json;
            JSONObject pagesBlock = (JSONObject) queryblock.get("query");
            JSONObject idBlock = (JSONObject) pagesBlock.get("pages");

            // if we pipe'd more than one title, we'll have more than one pageId entry
            for (Iterator it = idBlock.keySet().iterator(); it.hasNext();) {

                String pageId = (String) it.next();
                JSONObject block = (JSONObject) idBlock.get(pageId);
                // finally... The Categories!
                JSONArray categories = (JSONArray) block.get("categories");
                if (categories != null) {
                    Iterator<JSONObject> iterator = categories.iterator();
                    while (iterator.hasNext()) {
                        JSONObject category = (iterator.next());
                        String catName = (String) category.get("title");
                        // Strip the namespace prefix (English and Italian).
                        catName = catName.replaceFirst("Category:", "");
                        catName = catName.replaceFirst("Categoria:", "");
                        // Skip maintenance categories that carry no meaning.
                        if (!catName.toLowerCase().contains("stub") && !catName.contains("Featured Articles")
                                && !catName.toLowerCase().contains("disambiguation")) {
                            //System.out.println(catName);
                            if (!wikiCategories.contains(catName) && !blackTerms.contains(catName)) {
                                wikiCategories.add(catName);
                            }
                        }
                    }
                }

                // We can find related entities in the text
                // many articles have a "See Also" section that begins with
                //          <h2>See also</h2>\n<ul>
                // and ends with:
                //          </ul>

                // To retrieve these links, we don't need to scrap HTML.
                // We can just read the list of links included in the JSON
                // the drawback of this approach is that some pages have huge
                // amounts of links and many of them are uninteresting

                // For example, almost any page has a reference to the
                // definition of ISBN (contained in the references)
                // or of some other kind of wide-used identifier such as:
                // Pub-Med index,
                // Digital-Object-Identifier,
                // International Standard Book Number,
                // Wikisource, and so on.

                JSONArray links = (JSONArray) block.get("links");
                if (links != null) {
                    Iterator<JSONObject> iterator = links.iterator();
                    while (iterator.hasNext()) {
                        JSONObject link = (iterator.next());
                        String linkname = (String) link.get("title");

                        if (!wikiLinks.contains(linkname) && !blackTerms.contains(linkname)) {
                            wikiLinks.add(linkname);
                        }

                    }
                }
            }

        } catch (ParseException ex) {
            throw new AnnotationException(this, "Error while parsing JSON by Wikipedia for page: " + page, ex);
        } catch (MalformedURLException ex) {
            throw new AnnotationException(this, "Malformed Wikipedia URL: " + queryAddress, ex);
        } catch (IOException ex) {
            throw new AnnotationException(this, "Error while reading Wikipedia", ex);
        } finally {
            try {
                if (reader != null)
                    reader.close();
            } catch (IOException ex) {
                throw new AnnotationException(this, "Error while reading Wikipedia", ex);
            }
        }

        // Update the results.

        // How does it work? The strenght of an hypernym or related concept
        // is the sum of all the scores of the KPs which generate it.

        // So, for example, if the KPs "Software" and "Engineering" have
        // both score 0.5, and both have the related link "Software Engineering", 
        // the strength of the "Software Engineering" related concept is 
        // going to be 1.

        for (String cat : wikiCategories) {

            if (hypernyms.containsKey(cat)) {
                hypernyms.replace(cat, hypernyms.get(cat) + currentGram.getFeature(SCORE));
            } else {
                hypernyms.put(cat, currentGram.getFeature(SCORE));
            }

        }

        for (String rel : wikiLinks) {
            if (related.containsKey(rel)) {
                related.replace(rel, related.get(rel) + currentGram.getFeature(SCORE));
            } else
                related.put(rel, currentGram.getFeature(SCORE));
        }

    } // for (Gram currentGram : grams)
}

From source file:at.ac.tuwien.dsg.rSybl.cloudInteractionUnit.enforcementPlugins.dryRun.DryRunEnforcementAPI.java

/**
 * Loads the parameter names of all "dry" plugin actions from the classpath
 * resource config/resources.json into the {@code parameters} collection.
 * Any failure (missing resource, malformed JSON) is logged and swallowed,
 * matching the original best-effort behavior.
 */
public void readParameters() {
    JSONParser parser = new JSONParser();

    // try-with-resources: the original never closed the classpath stream.
    try (InputStream inputStream = Configuration.class.getClassLoader()
            .getResourceAsStream("config/resources.json");
            InputStreamReader in = new InputStreamReader(inputStream)) {

        JSONObject jsonObject = (JSONObject) parser.parse(in);

        for (Object p : jsonObject.keySet()) {
            String pluginName = (String) p;
            JSONObject plugin = (JSONObject) jsonObject.get(pluginName);
            // Only plugins whose name mentions "dry" are of interest here.
            if (pluginName.toLowerCase().contains("dry")) {
                for (Object a : plugin.keySet()) {
                    String actionName = (String) a;
                    JSONObject action = (JSONObject) plugin.get(actionName);
                    JSONArray jSONArray = (JSONArray) action.get("parameters");
                    for (int i = 0; i < jSONArray.size(); i++) {
                        parameters.add((String) jSONArray.get(i));
                    }
                }
            }
        }
    } catch (Exception e) {
        RuntimeLogger.logger.info(e.getMessage());
    }
}

From source file:io.fabric8.mq.autoscaler.MQAutoScaler.java

/**
 * Resolves the JMX root ObjectName of the broker reachable through the
 * given Jolokia client by querying the broker-name wildcard pattern and
 * taking the first matching MBean name.
 */
private ObjectName getBrokerJMXRoot(J4pClient client) throws Exception {
    ObjectName pattern = new ObjectName("org.apache.activemq:brokerName=*,type=Broker");
    J4pReadRequest request = new J4pReadRequest(pattern, "BrokerName");
    JSONObject payload = client.execute(request).getValue();
    Object firstKey = payload.keySet().iterator().next();
    return new ObjectName(firstKey.toString());

}

From source file:io.fabric8.mq.autoscaler.MQAutoScaler.java

private void bounceBroker(BrokerVitalSigns broker) throws Exception {
    if (broker.getTotalConnections() > 0) {
        ObjectName root = broker.getRoot();
        Hashtable<String, String> props = root.getKeyPropertyList();
        props.put("connector", "clientConnectors");
        props.put("connectorName", "*");
        String objectName = root.getDomain() + ":" + getOrderedProperties(props);

        /**/*from w  w w  .jav a2 s.c o  m*/
         * not interested in StatisticsEnabled, just need a real attribute so we can get the root which we
         * can execute against
         */

        List<String> roots = new ArrayList<>();
        J4pResponse<J4pReadRequest> response = broker.getClient()
                .execute(new J4pReadRequest(objectName, "StatisticsEnabled"));
        JSONObject value = response.getValue();
        for (Object key : value.keySet()) {
            roots.add(key.toString());
        }

        for (String key : roots) {
            broker.getClient().execute(new J4pExecRequest(key, "stop"));
            LOG.info("Stopping all clients " + " on broker " + broker.getBrokerIdentifier() + ": connector = "
                    + key);
        }
        Thread.sleep(1000);
        for (String key : roots) {
            broker.getClient().execute(new J4pExecRequest(key, "start"));
        }
    }
}

From source file:io.fabric8.mq.autoscaler.MQAutoScaler.java

/**
 * Queries the broker for every destination of the given kind (queue or
 * topic) and records each one's producer/consumer counts and queue depth
 * in the supplied BrokerVitalSigns. Advisory and temporary destinations
 * are skipped. Failures (e.g. no destinations exist yet) are logged at
 * debug level and otherwise ignored.
 */
private BrokerVitalSigns populateDestinations(DestinationVitalSigns.Type type,
        BrokerVitalSigns brokerVitalSigns) {

    try {
        ObjectName root = brokerVitalSigns.getRoot();
        Hashtable<String, String> queryProps = root.getKeyPropertyList();
        queryProps.put("destinationType", type == DestinationVitalSigns.Type.QUEUE ? "Queue" : "Topic");
        queryProps.put("destinationName", "*");
        String objectName = root.getDomain() + ":" + getOrderedProperties(queryProps);

        J4pResponse<J4pReadRequest> response = brokerVitalSigns.getClient()
                .execute(new J4pReadRequest(objectName, "Name", "QueueSize", "ConsumerCount", "ProducerCount"));
        JSONObject stats = response.getValue();
        for (Object mbeanName : stats.keySet()) {
            // One entry per destination MBean.
            JSONObject attrs = (JSONObject) stats.get(mbeanName);
            String name = attrs.get("Name").toString();
            String producerCount = attrs.get("ProducerCount").toString().trim();
            String consumerCount = attrs.get("ConsumerCount").toString().trim();
            String queueSize = attrs.get("QueueSize").toString().trim();

            boolean internal = name.contains("Advisory")
                    || name.contains(ActiveMQDestination.TEMP_DESTINATION_NAME_PREFIX);
            if (internal) {
                continue;
            }
            ActiveMQDestination destination = type == DestinationVitalSigns.Type.QUEUE
                    ? new ActiveMQQueue(name)
                    : new ActiveMQTopic(name);
            DestinationVitalSigns vitals = new DestinationVitalSigns(destination);
            vitals.setNumberOfConsumers(Integer.parseInt(consumerCount));
            vitals.setNumberOfProducers(Integer.parseInt(producerCount));
            vitals.setQueueDepth(Integer.parseInt(queueSize));
            brokerVitalSigns.addDestinationVitalSigns(vitals);
        }
    } catch (Exception ex) {
        // Destinations don't exist yet on the broker
        LOG.debug("populateDestinations failed", ex);
    }
    return brokerVitalSigns;
}