Example usage for org.apache.commons.collections.map LinkedMap LinkedMap

List of usage examples for org.apache.commons.collections.map LinkedMap LinkedMap

Introduction

In this page you can find example usage for the no-argument constructor LinkedMap() of org.apache.commons.collections.map.LinkedMap.

Prototype

public LinkedMap() 

Source Link

Document

Constructs a new empty map with default size and load factor.

Usage

From source file:edu.isi.pfindr.learn.util.PairsFileIO.java

/**
 * Reads a tab-separated pairs file and assigns a distinct integer index to each
 * distinct element, processing all first-column values before second-column values.
 *
 * @param pairsFilename path to a file whose lines are "left&lt;TAB&gt;right" pairs
 * @return a LinkedMap (insertion-ordered) from element String to Integer index;
 *         may be empty if the file could not be read (errors are logged, not thrown)
 */
public static LinkedMap readDistinctElementsIntoMap(String pairsFilename) {
    File pairsFile = new File(pairsFilename);
    LinkedMap phenotypeIndexMap = new LinkedMap();
    try {
        List<String> fileWithPairs = FileUtils.readLines(pairsFile);
        int index = 0;
        // First pass: index distinct left-hand elements (column 0).
        for (String s : fileWithPairs) {
            // Split once per line; the original split the same line repeatedly.
            String left = s.split("\t")[0];
            if (!phenotypeIndexMap.containsKey(left)) {
                phenotypeIndexMap.put(left, index);
                index++;
            }
        }
        // Second pass: index distinct right-hand elements (column 1) that were
        // not already indexed as left-hand elements.
        for (String s : fileWithPairs) {
            String right = s.split("\t")[1];
            if (!phenotypeIndexMap.containsKey(right)) {
                phenotypeIndexMap.put(right, index);
                index++;
            }
        }
        System.out.println("Index " + index);
    } catch (IOException e) {
        System.out.println("Error while reading/writing file with pairs" + e.getMessage());
        e.printStackTrace();
    } catch (Exception e) {
        e.printStackTrace();
    }
    return phenotypeIndexMap;
}

From source file:de.cosmocode.rendering.CollectionRenderer.java

@Override
public Renderer map() throws RenderingException {
    // Verify a map is permitted in the current rendering mode before switching.
    mode.checkAllowed(Mode.MAP);
    // Push a fresh insertion-ordered map that subsequent key/value calls fill in.
    stack.push(new LinkedMap());
    mode = Mode.MAP;
    return this; // fluent API: callers chain further rendering calls
}

From source file:com.jaspersoft.jasperserver.api.engine.jasperreports.util.JRQueryExecuterAdapter.java

/**
 * Executes a report query and collects the result rows into an insertion-ordered map
 * keyed by the value of {@code keyColumn}.
 *
 * @param query              the query to execute (its language selects the executer factory)
 * @param keyColumn          field name whose value becomes each map key
 * @param resultColumns      field names whose (converted) values form each map value
 * @param parameterValues    parameter name → value map for the query
 * @param parameterTypes     parameter name → declared type map
 * @param additionalParameters extra parameters merged into the dataset
 * @param formatValueColumns whether column values are formatted during conversion
 * @return an OrderedMap of key-column value → converted visible column values,
 *         preserving result-set row order
 * @throws JSExceptionWrapper wrapping any JRException from JasperReports
 */
public static OrderedMap executeQuery(final Query query, final String keyColumn, final String[] resultColumns,
        Map parameterValues, Map<String, Class<?>> parameterTypes, List additionalParameters,
        boolean formatValueColumns) {
    try {
        JRQueryExecuterFactory queryExecuterFactory = JRQueryExecuterUtils
                .getQueryExecuterFactory(query.getLanguage());

        // Build an in-memory dataset describing the parameters, fields and query text.
        JRParameter[] dsParameters = getDatasetParameters(queryExecuterFactory, parameterValues, parameterTypes,
                additionalParameters);
        JRField[] fields = getDatasetFields(keyColumn, resultColumns);
        JRQuery dsQuery = makeReportQuery(query);
        JSDataset dataset = new JSDataset(query.getName(), dsParameters, fields, dsQuery);

        // The executer factory expects parameters indexed by name.
        Map parametersMap = new HashMap();
        for (int i = 0; i < dsParameters.length; i++) {
            JRParameter parameter = dsParameters[i];
            parametersMap.put(parameter.getName(), parameter);
        }

        JRQueryExecuter executer = queryExecuterFactory.createQueryExecuter(dataset, parametersMap);
        try {
            JRDataSource ds = executer.createDatasource();
            // LinkedMap preserves row order while allowing lookup by key column value.
            OrderedMap values = new LinkedMap();
            while (ds.next()) {
                Object valueColumn = ds.getFieldValue(dataset.getField(keyColumn));

                Object[] visibleColumnValues = new Object[resultColumns.length];
                for (int idx = 0; idx < resultColumns.length; ++idx) {
                    Object fieldValue = ds.getFieldValue(dataset.getField(resultColumns[idx]));
                    visibleColumnValues[idx] = convertColumnValue(fieldValue, formatValueColumns);
                }

                values.put(valueColumn, convertVisibleColumnsValues(visibleColumnValues, formatValueColumns));
            }

            return values;
        } finally {
            // Always release the executer's resources, even if iteration fails.
            executer.close();
        }
    } catch (JRException e) {
        log.error("Error while executing query", e);
        throw new JSExceptionWrapper(e);
    }

}

From source file:edu.harvard.iq.dvn.core.web.StudyListing.java

/**
 * Stores a StudyListing in the session-scoped listings map under a freshly
 * generated index, evicting the oldest entry once more than 5 are held.
 *
 * @param sl         the study listing to store
 * @param sessionMap the session attribute map holding "studyListings" (OrderedMap)
 *                   and "studyListingsCount" (Long counter)
 * @return the generated index, "{count}_{sessionId}", under which {@code sl} was stored
 */
public static String addToStudyListingMap(StudyListing sl, Map sessionMap) {
    Long slCount = (Long) sessionMap.get("studyListingsCount");
    OrderedMap slMap = (OrderedMap) sessionMap.get("studyListings");
    String sessionId = ((HttpSession) FacesContext.getCurrentInstance().getExternalContext().getSession(false))
            .getId();

    // First listing in this session starts the counter at 0; otherwise advance it.
    if (slCount == null) {
        slCount = Long.valueOf(0L); // Long.valueOf over deprecated new Long(...)
    } else {
        slCount = slCount + 1;
    }

    // Lazily create the listings map on first use.
    if (slMap == null) {
        slMap = new LinkedMap();
        sessionMap.put("studyListings", slMap);
    }

    sessionMap.put("studyListingsCount", slCount);
    String newIndex = slCount + "_" + sessionId;
    slMap.put(newIndex, sl);

    // Bound the cache: LinkedMap preserves insertion order, so firstKey()
    // is the oldest entry.
    if (slMap.size() > 5) {
        slMap.remove(slMap.firstKey());
    }

    return newIndex;
}

From source file:de.innovationgate.wgpublisher.webtml.portlet.TMLPortlet.java

/**
 * Queue that stores the last 1000 fired portlet events, so portlets can
 * react on them with serverside code./*from   w ww .jav a 2  s.c o  m*/
 * @param session
 * @return
 */
/**
 * Returns the session's queue of fired portlet events so portlets can react to
 * them with serverside code, lazily creating it on first access.
 *
 * @param session the HTTP session owning the queue
 * @return the session's event queue as a LinkedMap (never null)
 */
public static LinkedMap getFiredEventsQueue(HttpSession session) {

    // Synchronize on the session so concurrent requests for the same session
    // cannot both create (and overwrite) the queue attribute.
    synchronized (session) {
        @SuppressWarnings("unchecked")
        TransientObjectWrapper<LinkedMap> eventWrapper = (TransientObjectWrapper<LinkedMap>) session
                .getAttribute(WGACore.SESSION_FIREDPORTLETEVENTS);
        // Create the queue if it is missing, or if the transient wrapper lost
        // its payload (e.g. after session serialization).
        if (eventWrapper == null || eventWrapper.get() == null) {
            eventWrapper = new TransientObjectWrapper<LinkedMap>();
            eventWrapper.set(new LinkedMap());
            session.setAttribute(WGACore.SESSION_FIREDPORTLETEVENTS, eventWrapper);
        }
        return eventWrapper.get();
    }

}

From source file:de.innovationgate.webgate.api.templates.QueryableSource.java

/**
 * Runs the given query and returns its results keyed by sequential
 * QueryableSourceKey instances, preserving result order.
 *
 * @param type       the content type (unused by this implementation)
 * @param query      the query to execute
 * @param parameters query parameters (unused by this implementation)
 * @return an insertion-ordered map of key → result, or null if the query
 *         produced no result list
 * @throws WGAPIException if query execution fails
 */
public Map find(String type, String query, Map parameters) throws WGAPIException {
    List results = find(query);
    if (results == null) {
        return null;
    }
    // Number each result 1..n and store it under a sequential key.
    LinkedMap resultMap = new LinkedMap();
    int keyNr = 0;
    for (Object result : results) {
        keyNr++;
        resultMap.put(new QueryableSourceKey(keyNr), result);
    }
    return resultMap;
}

From source file:de.innovationgate.webgate.api.templates.LazyBeanList.java

/**
 * Returns a lazy sub-list view covering the keys in [arg0, arg1).
 *
 * @param arg0 start index, inclusive
 * @param arg1 end index, exclusive
 * @return a new LazyBeanList over the selected keys and their cached contents
 */
public List subList(int arg0, int arg1) {

    // Copy the requested key range (and any already-loaded contents) into a
    // fresh insertion-ordered map, then wrap it in a new lazy list backed by
    // the same database and folder.
    Map subMap = new LinkedMap();
    for (Object key : _keys.subList(arg0, arg1)) {
        subMap.put(key, _contents.get(key));
    }

    return new LazyBeanList(_db, _folder, subMap);
}

From source file:de.innovationgate.webgate.api.rss2.SimpleRSS.java

/**
 * Retrieves and parses an RSS feed and returns its items keyed by item id.
 *
 * @param folder     the folder name (unused by this implementation)
 * @param query      the feed location used by {@code retrievePage}
 * @param parameters query options; WGDatabase.QUERYOPTION_NATIVEOPTIONS is honored
 * @return an insertion-ordered map of item id (as String) → ItemWrapper
 * @throws WGQueryException if the feed cannot be retrieved or parsed
 */
public Map find(String folder, String query, Map parameters) throws WGQueryException {

    URL rssURL;
    ChannelIF channel = null;
    NativeQueryOptions nativeOptions = new NativeQueryOptions(
            (String) parameters.get(WGDatabase.QUERYOPTION_NATIVEOPTIONS));

    try {
        // The query string itself is the feed location; retrievePage fetches it.
        channel = FeedParser.parse(new ChannelBuilder(), retrievePage(query, nativeOptions));
        if (channel != null) {
            // Remember the parsed channel in the thread-local for later access.
            _channel.set(channel);
        } else {
            throw new WGQueryException("Unable to retrieve or parse feed", query);
        }
    } catch (Exception e) {
        throw new WGQueryException("Exception retrieving or parsing feed", query, e);
    }

    // Wrap every feed item and key it by its (stringified) numeric id,
    // preserving feed order via LinkedMap.
    List items = new ArrayList(channel.getItems());
    Map itemMap = new LinkedMap();
    Iterator iter = items.iterator();

    while (iter.hasNext()) {
        Item item = (Item) iter.next();
        itemMap.put((new Long(item.getId())).toString(), new ItemWrapper(item));
    }
    return itemMap;
}

From source file:co.turnus.analysis.bottlenecks.util.HotspotsDataAnalyser.java

@SuppressWarnings("unchecked")
public <T> Map<T, ExtendExecData> getSumDataMap(Class<T> type, Key key, Order order) {
    Map<T, ExtendExecData> tmpMap = Maps.newHashMap();
    List<Map.Entry<T, ExtendExecData>> list = new ArrayList<>();

    if (type.isAssignableFrom(Actor.class)) {
        for (Actor actor : network.getActors()) {
            tmpMap.put((T) actor, getSumData(actor));
        }/*from ww w.ja  v  a2s.c  o  m*/
        list.addAll(tmpMap.entrySet());
    } else if (type.isAssignableFrom(ActorClass.class)) {
        for (ActorClass clazz : network.getActorClasses()) {
            tmpMap.put((T) clazz, getSumData(clazz));
        }
        list.addAll(tmpMap.entrySet());
    } else if (type.isAssignableFrom(Action.class)) {
        Collection<Action> actions = Sets.newHashSet();
        for (ActorClass clazz : network.getActorClasses()) {
            actions.addAll(clazz.getActions());
        }
        for (Action action : actions) {
            tmpMap.put((T) action, getSumData(action));
        }
        list.addAll(tmpMap.entrySet());
    } else {
        return null;
    }

    mapComparator.setSorting(key, order);
    Collections.sort(list, mapComparator);

    Map<T, ExtendExecData> data = new LinkedMap();
    for (Entry<T, ExtendExecData> entry : list) {
        data.put(entry.getKey(), entry.getValue());
    }
    return data;
}

From source file:edu.isi.pfindr.learn.util.PairsFileIO.java

/**
 * Reads the distinct elements of a pairs file, looks up each element's class
 * label in the original cohort file, and writes "element&lt;TAB&gt;class" lines
 * to "&lt;pairsFilepath-basename&gt;_distinct_with_class.txt".
 *
 * @param pairsFilepath path to the pairs file; also determines the output filename
 */
public void readDistinctElementsFromPairsAddClass(String pairsFilepath) {
    List<Object> distinctElements = readDistinctElementsIntoList(pairsFilepath);
    System.out.println("Size of distinctElements" + distinctElements.size());
    for (int i = 0; i < distinctElements.size(); i++) {
        System.out.println("distinctElements " + i + " " + distinctElements.get(i));
    }

    // Get the class for those distinct elements from the original cohort file.
    String originalFile = "data/cohort1/bio_nlp/cohort1_s.txt";
    BufferedReader br = null;
    String thisLine;
    String[] lineArray;
    LinkedMap originalMap = new LinkedMap();
    BufferedWriter distinctPriorityPairsWriter = null;

    try {
        // Build element (column 3) -> class (column 1) from the cohort file.
        br = new BufferedReader(new FileReader(originalFile));
        while ((thisLine = br.readLine()) != null) {
            thisLine = thisLine.trim();
            if (thisLine.equals(""))
                continue;

            lineArray = thisLine.split("\t");
            originalMap.put(lineArray[3], lineArray[1]);
        }

        // Write distinct elements with their class to the output file.
        StringBuffer outfileBuffer = new StringBuffer();
        for (int i = 0; i < distinctElements.size(); i++)
            outfileBuffer.append(distinctElements.get(i)).append("\t")
                    .append(originalMap.get(distinctElements.get(i)) + "\n");

        distinctPriorityPairsWriter = new BufferedWriter(
                new FileWriter(pairsFilepath.split("\\.")[0] + "_distinct_with_class.txt"));

        distinctPriorityPairsWriter.append(outfileBuffer.toString());
        outfileBuffer.setLength(0);
        distinctPriorityPairsWriter.flush();

    } catch (IOException io) {
        io.printStackTrace();
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        // BUG FIX: the original closed the reader only inside the IOException
        // handler and never closed the writer at all, leaking both streams on
        // the success path. Always close them here.
        try {
            if (br != null)
                br.close();
        } catch (IOException e) {
            System.out.println("Problem occured while closing input stream " + br);
            e.printStackTrace();
        }
        try {
            if (distinctPriorityPairsWriter != null)
                distinctPriorityPairsWriter.close();
        } catch (IOException e) {
            System.out.println("Problem occured while closing output stream " + distinctPriorityPairsWriter);
            e.printStackTrace();
        }
    }

}