Example usage for org.apache.commons.collections MultiHashMap put

List of usage examples for org.apache.commons.collections MultiHashMap put

Introduction

On this page you can find example usage for org.apache.commons.collections MultiHashMap put.

Prototype

public Object put(Object key, Object value) 

Source Link

Document

Adds the value to the collection associated with the specified key.

Usage

From source file:com.cyclopsgroup.waterview.DummyTest.java

/**
 * Dummy  test case/* ww  w  .  j  ava2s  .  co m*/
 */
public void testDummy() {
    MultiHashMap map = new MultiHashMap();
    map.put("a", "a1");
    map.put("a", "a2");
    Properties p = new Properties();
    p.putAll(map);
    System.out.println(p.getProperty("a"));
}

From source file:com.opensymphony.webwork.util.classloader.monitor.FilesystemAlterationMonitor.java

public void addListener(final FilesystemAlterationListener listener, final File directory) {
    synchronized (mutexListeners) {
        // Build fresh copies and swap the references so readers never see a
        // partially updated mapping (copy, mutate, then publish).

        // listener -> directories it watches
        final MultiHashMap updatedListeners = new MultiHashMap(listeners);
        updatedListeners.put(listener, directory);
        listeners = updatedListeners;

        // directory -> listeners watching it
        final MultiHashMap updatedDirectories = new MultiHashMap(directories);
        updatedDirectories.put(directory, listener);
        directories = updatedDirectories;
    }
}

From source file:org.geoserver.wfs.xml.GML2OutputFormat2.java

/**
 * Encodes the feature collections in {@code results} as a GML2/WFS 1.0.0
 * document and writes it to {@code output}.
 *
 * @param results    the feature collections produced by the GetFeature operation
 * @param output     stream the encoded XML is written to
 * @param getFeature the originating operation; its first parameter is adapted
 *                   into a {@link GetFeatureRequest}
 * @throws ServiceException if a feature type is not found in the catalog
 * @throws IOException      on encoding/output failure
 */
protected void write(FeatureCollectionResponse results, OutputStream output, Operation getFeature)
        throws ServiceException, IOException {

    //declare wfs schema location
    GetFeatureRequest gft = GetFeatureRequest.adapt(getFeature.getParameters()[0]);

    List featureCollections = results.getFeature();

    //round up the info objects for each feature collection
    //(a namespace can own several feature types, hence a multi-map)
    MultiHashMap ns2metas = new MultiHashMap();

    for (Iterator fc = featureCollections.iterator(); fc.hasNext();) {
        SimpleFeatureCollection features = (SimpleFeatureCollection) fc.next();
        SimpleFeatureType featureType = features.getSchema();

        //load the metadata for the feature type
        String namespaceURI = featureType.getName().getNamespaceURI();
        FeatureTypeInfo meta = catalog.getFeatureTypeByName(namespaceURI, featureType.getTypeName());
        if (meta == null)
            throw new WFSException(gft, "Could not find feature type " + namespaceURI + ":"
                    + featureType.getTypeName() + " in the GeoServer catalog");

        NamespaceInfo ns = catalog.getNamespaceByURI(namespaceURI);
        ns2metas.put(ns, meta);
    }

    Collection<FeatureTypeInfo> featureTypes = ns2metas.values();

    //create the encoder
    ApplicationSchemaXSD xsd = new ApplicationSchemaXSD(null, catalog, gft.getBaseUrl(),
            org.geotools.wfs.v1_0.WFS.getInstance(), featureTypes);
    Configuration configuration = new ApplicationSchemaConfiguration(xsd,
            new org.geotools.wfs.v1_0.WFSConfiguration());

    Encoder encoder = new Encoder(configuration);
    //encoder.setEncoding(wfs.getCharSet());

    encoder.setSchemaLocation(org.geoserver.wfs.xml.v1_1_0.WFS.NAMESPACE,
            buildSchemaURL(gft.getBaseUrl(), "wfs/1.0.0/WFS-basic.xsd"));

    //declare application schema namespaces: one schemaLocation per namespace,
    //pointing at a DescribeFeatureType request for all its type names
    Map<String, String> params = params("service", "WFS", "version", "1.0.0", "request", "DescribeFeatureType");
    for (Iterator i = ns2metas.entrySet().iterator(); i.hasNext();) {
        Map.Entry entry = (Map.Entry) i.next();

        NamespaceInfo ns = (NamespaceInfo) entry.getKey();
        String namespaceURI = ns.getURI();

        //MultiHashMap entry values are collections of FeatureTypeInfo
        Collection metas = (Collection) entry.getValue();

        //build a comma-separated list of the namespace's type names
        StringBuffer typeNames = new StringBuffer();

        for (Iterator m = metas.iterator(); m.hasNext();) {
            FeatureTypeInfo meta = (FeatureTypeInfo) m.next();
            typeNames.append(meta.getPrefixedName());

            if (m.hasNext()) {
                typeNames.append(",");
            }
        }

        //set the schema location
        params.put("typeName", typeNames.toString());
        encoder.setSchemaLocation(namespaceURI, buildURL(gft.getBaseUrl(), "wfs", params, URLType.RESOURCE));
    }

    encoder.encode(results.getAdaptee(), org.geotools.wfs.v1_0.WFS.FeatureCollection, output);
}

From source file:org.geotools.xml.impl.BindingPropertyExtractor.java

/**
 * Collects the encodable properties of {@code object} for the given element
 * declaration as a list of {@code Object[]{XSDParticle, value}} pairs.
 *
 * First gathers properties derivable from the schema's child particles, then
 * properties reported by the bindings themselves (wrapping each of those in a
 * synthesized particle), and finally — for complex attributes — re-sorts the
 * result to match the schema's declared order.
 *
 * @param object  the object being encoded
 * @param element the element declaration whose children are being resolved
 * @return list of {particle, value} pairs, possibly schema-ordered
 */
public List properties(Object object, XSDElementDeclaration element) {
    List properties = new ArrayList();

    //first get all the properties that can be inferred from the schema
    List children = encoder.getSchemaIndex().getChildElementParticles(element);

    O: for (Iterator itr = children.iterator(); itr.hasNext();) {
        XSDParticle particle = (XSDParticle) itr.next();
        XSDElementDeclaration child = (XSDElementDeclaration) particle.getContent();

        if (child.isElementDeclarationReference()) {
            //resolve references to the actual declaration
            child = child.getResolvedElementDeclaration();
        }

        //get the object(s) for this element 
        GetPropertyExecutor executor = new GetPropertyExecutor(object, child);

        BindingVisitorDispatch.walk(object, encoder.getBindingWalker(), element, executor, context);

        if (executor.getChildObject() != null) {
            properties.add(new Object[] { particle, executor.getChildObject() });
        }
    }

    //second, get the properties which cannot be inferred from the schema
    GetPropertiesExecutor executor = new GetPropertiesExecutor(object, element);

    BindingVisitorDispatch.walk(object, encoder.getBindingWalker(), element, executor, context);

    if (!executor.getProperties().isEmpty()) {
        //group into a map of name, list
        MultiHashMap map = new MultiHashMap();

        for (Iterator p = executor.getProperties().iterator(); p.hasNext();) {
            Object[] property = (Object[]) p.next();
            map.put(property[0], property[1]);
        }

        //turn each map entry into a particle
        HashMap particles = new HashMap();

        for (Iterator e = map.entrySet().iterator(); e.hasNext();) {
            Map.Entry entry = (Map.Entry) e.next();

            //key could be a name or a particle
            if (entry.getKey() instanceof XSDParticle) {
                XSDParticle particle = (XSDParticle) entry.getKey();
                particles.put(Schemas.getParticleName(particle), particle);
                continue;
            }

            QName name = (QName) entry.getKey();
            Collection values = (Collection) entry.getValue();

            //check for comment
            if (Encoder.COMMENT.equals(name)) {
                //create a dom element whose text nodes carry the comments
                Element comment = encoder.getDocument().createElement(Encoder.COMMENT.getLocalPart());

                for (Iterator v = values.iterator(); v.hasNext();) {
                    comment.appendChild(encoder.getDocument().createTextNode(v.next().toString()));
                }

                XSDParticle particle = XSDFactory.eINSTANCE.createXSDParticle();

                XSDElementDeclaration elementDecl = XSDFactory.eINSTANCE.createXSDElementDeclaration();
                elementDecl.setTargetNamespace(Encoder.COMMENT.getNamespaceURI());
                elementDecl.setName(Encoder.COMMENT.getLocalPart());
                elementDecl.setElement(comment);

                particle.setContent(elementDecl);
                particles.put(name, particle);

                continue;
            }

            //find the element 
            XSDElementDeclaration elementDecl = encoder.getSchemaIndex().getElementDeclaration(name);

            if (elementDecl == null) {
                //look for the element declaration as a particle of the containing type
                XSDParticle particle = Schemas.getChildElementParticle(element.getType(), name.getLocalPart(),
                        true);
                if (particle != null) {
                    particles.put(name, particle);
                    continue;
                }
            }

            if (elementDecl == null) {
                //TODO: resolving like this will return an element no 
                // matter what, modifying the underlying schema; this might
                // be dangerous. What we should do is force the schema to 
                // resolve all of its imports when the encoder starts
                elementDecl = encoder.getSchema().resolveElementDeclaration(name.getNamespaceURI(),
                        name.getLocalPart());
            }

            //look for a particle in the containing type which is either 
            // a) a base type of the element
            // b) in the same substitution group
            // if found, use the particle to derive multiplicity
            XSDParticle reference = null;
            for (Iterator p = Schemas.getChildElementParticles(element.getType(), true).iterator(); p
                    .hasNext();) {
                XSDParticle particle = (XSDParticle) p.next();
                XSDElementDeclaration el = (XSDElementDeclaration) particle.getContent();
                if (el.isElementDeclarationReference()) {
                    el = el.getResolvedElementDeclaration();
                }

                if (Schemas.isBaseType(elementDecl, el)) {
                    reference = particle;
                    break;
                }
            }

            //wrap the property in a particle
            XSDParticle particle = XSDFactory.eINSTANCE.createXSDParticle();
            XSDElementDeclaration wrapper = XSDFactory.eINSTANCE.createXSDElementDeclaration();
            wrapper.setResolvedElementDeclaration(elementDecl);
            particle.setContent(wrapper);

            //if there is a reference, derive multiplicity
            if (reference != null) {
                particle.setMaxOccurs(reference.getMaxOccurs());
            } else {
                //derive from collection
                if (values.size() > 1) {
                    //make a multi property
                    particle.setMaxOccurs(-1);
                } else {
                    //single property
                    particle.setMaxOccurs(1);
                }
            }

            particles.put(name, particle);
        }

        //process the particles in order in which we got the properties
        for (Iterator p = executor.getProperties().iterator(); p.hasNext();) {
            Object[] property = (Object[]) p.next();
            Collection values = (Collection) map.get(property[0]);

            QName name;
            if (property[0] instanceof XSDParticle) {
                name = Schemas.getParticleName((XSDParticle) property[0]);
            } else {
                name = (QName) property[0];
            }

            XSDParticle particle = (XSDParticle) particles.get(name);

            if (particle == null) {
                continue; //already processed, must be a multi property
            }

            if (values.size() > 1) {
                //add as is, the encoder will unwrap
                properties.add(new Object[] { particle, values });
            } else {
                //unwrap it
                properties.add(new Object[] { particle, values.iterator().next() });
            }

            //done with this particle
            particles.remove(name);
        }
    }

    if (properties.size() <= 1) {
        return properties;
    }

    /*
     feature properties in the "properties" list may not be in the same order as they appear in the schema,
     because in the above implementation, simple attributes and complex attributes are processed separately.
              
     to maintain the feature properties order, sort the properties to their original order as in "children" list               
    */
    if (object instanceof ComplexAttributeImpl && propertiesSortable(properties, children)) {
        List sortedProperties = new ArrayList();

        //sort properties according to their XSDParticle order in "children"
        for (int i = 0; i < children.size(); i++) {
            XSDParticle particle = (XSDParticle) children.get(i);
            XSDElementDeclaration child = (XSDElementDeclaration) particle.getContent();
            if (child.getResolvedElementDeclaration() != null) {
                child = child.getResolvedElementDeclaration();
            }

            for (Iterator itr = properties.iterator(); itr.hasNext();) {
                Object[] prop = (Object[]) itr.next();
                XSDParticle part = (XSDParticle) prop[0];
                XSDElementDeclaration partContent = (XSDElementDeclaration) part.getContent();
                if (partContent.getResolvedElementDeclaration() != null) {
                    partContent = partContent.getResolvedElementDeclaration();
                }
                //match by local name, and by namespace when both sides declare one
                if (child.getName().equals(partContent.getName())
                        && ((child.getTargetNamespace() != null && partContent.getTargetNamespace() != null)
                                ? child.getTargetNamespace().equals(partContent.getTargetNamespace())
                                : true)) {
                    sortedProperties.add(prop);
                    properties.remove(prop);
                    //stay on this child in case it matches again — TODO confirm intent
                    i--;
                    break;
                }
            }
        }
        //return properties in order they appear in the schema
        return sortedProperties;
    } else {
        return properties;
    }
}

From source file:org.infoscoop.dao.TabLayoutDAO.java

/**
 * Return the MultiHashMap includes all the recoeds in tablayout table.
 *
 * @param resource/*from  w w  w  . j a v  a 2 s .c o  m*/
 * @return MultiHashMap
 *         <UL>
 *         <LI>key: tabId</LI>
 *         <LI>value: XmlObject List</LI>
 *         </UL>
 */
public MultiHashMap getTabLayout(final String tabId) {
    HibernateTemplate templete = super.getHibernateTemplate();

    MultiHashMap map = (MultiHashMap) templete.execute(new HibernateCallback() {

        public Object doInHibernate(Session session) throws HibernateException, SQLException {

            Criteria cri = session.createCriteria(TabLayout.class)
                    .add(Expression.eq(TabLayout.PROP_DELETEFLAG, TabLayout.DELETEFLAG_FALSE))
                    .add(Expression.eq("id.Temp", TabLayout.TEMP_FALSE));

            if (tabId != null) {
                if (tabId.equals("0")) {
                    cri.add(Expression.or(Expression.eq("id.Tabid", tabId),
                            Expression.eq("id.Tabid", "commandbar")));
                } else {
                    cri.add(Expression.eq("id.Tabid", tabId));
                }
            }
            cri.addOrder(Order.asc("id.Roleorder"));

            Map map = new MultiHashMap();
            TabLayout tablayout;
            for (Iterator ite = cri.list().iterator(); ite.hasNext();) {
                tablayout = (TabLayout) ite.next();
                map.put(tablayout.getId().getTabid(), tablayout);
            }

            return map;
        }
    });

    return map;
}

From source file:org.infoscoop.web.ProxyCredentialManageServlet.java

/**
 * Dispatches credential-management commands identified by the "command"
 * request parameter, acting on behalf of the session user ("Uid"):
 * list, try (detect), add, rst (reset password), frst (force reset),
 * del, del_oauth. Unknown commands and failures answer HTTP 500.
 *
 * @param request  servlet request carrying the command and its parameters
 * @param response servlet response; XML/JSON body or error status
 */
protected void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    String command = request.getParameter("command");
    String uid = (String) request.getSession().getAttribute("Uid");
    try {
        if ("list".equals(command)) {
            //list: return the user's credentials and OAuth consumers as a JSON array
            response.setHeader("Content-Type", "text/xml; charset=UTF-8");
            List<AuthCredential> credentialList = AuthCredentialDAO.newInstance().select(uid);
            List<OAuthConsumerProp> consumers = OAuthConsumerDAO.newInstance().getConsumersByUid(uid);
            List<String> idList = new ArrayList<String>();
            try {
                JSONArray json = new JSONArray();
                for (Iterator<AuthCredential> it = credentialList.iterator(); it.hasNext();) {
                    AuthCredential c = (AuthCredential) it.next();
                    json.put(c.toJSON());
                }

                JSONObject oauthJSON;
                for (Iterator<OAuthConsumerProp> i = consumers.iterator(); i.hasNext();) {
                    oauthJSON = new JSONObject();
                    OAuthConsumerProp consumerProp = i.next();
                    String id = consumerProp.getId();
                    //skip consumers already emitted (dedupe by id)
                    if (idList.contains(id))
                        continue;

                    oauthJSON.put("service_name", consumerProp.getServiceName());
                    oauthJSON.put("authType", "OAuth");
                    oauthJSON.put("description", consumerProp.getDescription());
                    Set<OAuthGadgetUrl> gadgetUrls = consumerProp.getOAuthGadgetUrl();
                    JSONArray gadgetUrlArr = new JSONArray();
                    for (Iterator<OAuthGadgetUrl> j = gadgetUrls.iterator(); j.hasNext();) {
                        gadgetUrlArr.put(j.next().getGadgetUrl());
                    }
                    oauthJSON.put("gadget_urls", gadgetUrlArr);
                    idList.add(id);

                    json.put(oauthJSON);
                }

                response.getWriter().write(json.toString());
                response.getWriter().flush();
            } catch (JSONException e) {
                log.error("", e);
                response.sendError(500);
            }
        } else if ("try".equals(command)) {
            //try: detect whether a stored credential matches the given url/authType
            response.setHeader("Content-Type", "text/xml; charset=UTF-8");

            String url = request.getParameter("url");
            String authType = request.getParameter("authType");
            String authCredentialId = AuthCredentialService.getHandle().detectCredential(uid, authType, url);
            if (authCredentialId != null) {
                response.getWriter().write(authCredentialId);
            } else {
                response.getWriter().write("cannot_detect_credential");
            }
            response.getWriter().flush();

        } else if ("add".equals(command)) {
            //add: register a new credential, forwarding all request headers
            response.setHeader("Content-Type", "text/xml; charset=UTF-8");
            String authType = request.getParameter("authType");
            String authUid = request.getParameter("authUid");
            String authPasswd = request.getParameter("authPasswd");
            String authDomain = request.getParameter("authDomain");
            String url = request.getParameter("url");

            //collect every request header (multi-valued headers supported)
            MultiHashMap headerMap = new MultiHashMap();
            Enumeration<String> headerNames = request.getHeaderNames();
            while (headerNames.hasMoreElements()) {
                String headerName = headerNames.nextElement();
                Enumeration<String> headers = request.getHeaders(headerName);
                while (headers.hasMoreElements()) {
                    headerMap.put(headerName, headers.nextElement());
                }
            }

            String authCredentialId = AuthCredentialService.getHandle().addCredential(uid, authType, authUid,
                    authPasswd, authDomain, url, headerMap);
            if (authCredentialId != null) {
                response.getWriter().write(authCredentialId);
            } else {
                response.getWriter().write("add_credential_failed");
            }
            response.getWriter().flush();
        } else if ("rst".equals(command)) {
            //rst: reset the password; respond with the urls that failed
            response.setHeader("Content-Type", "text/xml; charset=UTF-8");
            String credentialId = request.getParameter("id");
            String authPasswd = request.getParameter("authPasswd");
            String[] urlList = request.getParameterValues("url");
            Collection errorUrlList;
            errorUrlList = AuthCredentialService.getHandle().resetPassword(uid, credentialId, authPasswd,
                    urlList);

            JSONArray json = new JSONArray();
            for (Iterator it = errorUrlList.iterator(); it.hasNext();) {
                json.put((String) it.next());
            }
            response.getWriter().write(json.toString());
            response.getWriter().flush();
        } else if ("frst".equals(command)) {
            //frst: force-reset the password; no response body on success
            String credentialId = request.getParameter("id");
            String authPasswd = request.getParameter("authPasswd");
            AuthCredentialService.getHandle().forceResetPassword(uid, credentialId, authPasswd);

        } else if ("del".equals(command)) {
            //del: remove a stored credential
            String credentialId = request.getParameter("id");
            AuthCredentialService.getHandle().removeCredential(uid, credentialId);
        } else if ("del_oauth".equals(command)) {
            //del_oauth: try OAuth 1 tokens first, fall back to OAuth 2
            String serviceName = request.getParameter("service_name");
            if (!OAuthService.getHandle().deleteOAuthTokens(uid, serviceName)) {
                OAuthService.getHandle().deleteOAuth2Tokens(uid, serviceName);
            }
        } else {
            response.sendError(500);
        }
    } catch (Exception e) {
        log.error("", e);
        response.sendError(500, e.getMessage());
    }
}

From source file:org.unigram.likelike.lsh.TestLSHRecommendations.java

/**
 * Reads the first "part-*" file under outputPath on the local filesystem,
 * collects its tab-separated (target, recommended) id pairs into a
 * MultiHashMap, and validates them via check().
 *
 * @param conf       Hadoop configuration used to obtain the local FileSystem
 * @param outputPath directory containing the job output files
 * @return true if a part file was found and checked, false otherwise
 * @throws IOException on filesystem access failure
 */
private boolean dfsCheck(Configuration conf, Path outputPath) throws IOException {
    FileSystem fs = FileSystem.getLocal(conf);
    Path[] outputFiles = FileUtil.stat2Paths(fs.listStatus(outputPath, new OutputLogFilter()));

    for (Path outputFile : outputFiles) {
        if (!outputFile.getName().startsWith("part-")) {
            continue;
        }
        MultiHashMap resultMap = new MultiHashMap();
        BufferedReader reader = this.asBufferedReader(fs.open(outputFile));
        try {
            String line;
            while ((line = reader.readLine()) != null) {
                //each line: <target>\t<recommended>
                String[] lineArray = line.split("\t");
                resultMap.put(Long.parseLong(lineArray[0]), // target
                        Long.parseLong(lineArray[1])); // recommended
            }
        } finally {
            //FIX: the reader was previously never closed (resource leak)
            reader.close();
        }
        this.check(resultMap);
        //only the first part file is inspected — preserved from the original
        return true;
    }
    return false;
}