Example usage for org.apache.commons.collections.map LinkedMap get

List of usage examples for org.apache.commons.collections.map LinkedMap get

Introduction

On this page you can find example usage for org.apache.commons.collections.map LinkedMap get.

Prototype

public Object get(int index) 

Document

Gets the key at the specified index.
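
Note that get(int index) returns the key at the given insertion position, while the inherited Map get(Object key) returns the value mapped to a key, and getValue(int index) returns the value at a position. The following is a minimal, self-contained sketch of the difference (assuming commons-collections 3.x on the classpath); the class name and sample entries are hypothetical.

import org.apache.commons.collections.map.LinkedMap;

public class LinkedMapGetSketch {
    public static void main(String[] args) {
        LinkedMap map = new LinkedMap();
        map.put("first", Integer.valueOf(1));
        map.put("second", Integer.valueOf(2));

        Object keyAtZero = map.get(0);          // get(int index) -> key at position 0 ("first")
        Object valueForKey = map.get("second"); // get(Object key) -> value for that key (2)
        Object valueAtZero = map.getValue(0);   // getValue(int index) -> value at position 0 (1)

        System.out.println(keyAtZero + " / " + valueForKey + " / " + valueAtZero);
    }
}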

Usage

From source file:de.innovationgate.wgpublisher.webtml.portlet.TMLPortletStateSessionStorage.java

@Override
public TMLPortletState getState(TMLPortlet portlet) throws WGAPIException {

    String completeKey = getSessionContextKey(portlet);

    synchronized (_session) {

        // Get - conditionally create session context map
        Map<String, TMLPortletState> contexts = getSessionMap();

        // Get - conditionally create individual session context
        TMLPortletState context = (TMLPortletState) contexts.get(completeKey);
        if (context == null) {
            context = portlet.createState(this);

            // Set event index to current last index, so events fired before creation of this context are not executed for it
            LinkedMap list = TMLPortlet.getFiredEventsQueue(_session);
            if (!list.isEmpty()) {
                PortletEvent event = (PortletEvent) list.get(list.lastKey());
                context.setLastProcessedEventIndex(event.getIndex());
            }

            contexts.put(completeKey, context);
        }
        return context;
    }

}

From source file:com.tilab.ca.sse.core.classify.Classifier.java

private List<ScoreDoc> sortByRank(Map<ScoreDoc, Integer> inputList) {
    LOG.debug("[sortByRank] - BEGIN");
    List<ScoreDoc> result = new ArrayList<>();
    LinkedMap apacheMap = new LinkedMap(inputList);
    for (int i = 0; i < apacheMap.size() - 1; i++) {
        Map<Float, ScoreDoc> treeMap = new TreeMap<>(Collections.reverseOrder());
        do {
            i++;
            treeMap.put(((ScoreDoc) apacheMap.get(i - 1)).score, (ScoreDoc) apacheMap.get(i - 1));
        } while (i < apacheMap.size() && apacheMap.getValue(i).equals(apacheMap.getValue(i - 1)));
        i--;
        treeMap.keySet().stream().forEach((score) -> {
            result.add(treeMap.get(score));
        });
    }
    LOG.debug("[sortByRank] - END");
    return result;
}

From source file:de.innovationgate.wgpublisher.webtml.EventScript.java

/**
 * Searches the fired events queue for events of a name, beginning at a specified index
 * @param name The event name to search for
 * @param index The start index in the queue
 * @return a list of found events
 * @throws WGAPIException 
 */
private List<PortletEvent> findEventsOfName(String name, Long index) throws WGAPIException {

    List<PortletEvent> foundEvents = new ArrayList<PortletEvent>();
    HttpSession session = getPageContext().getSession();
    LinkedMap events = TMLPortlet.getFiredEventsQueue(session);

    if (events.size() == 0) {
        return foundEvents;
    }

    // Find the start index. This is either the index after the last processed index, or - if the last processed
    // index is not available in the queue - the first index in the queue. 
    if (events.containsKey(index)) {
        index = (Long) events.nextKey(index);
    } else {
        index = (Long) events.firstKey();
    }

    synchronized (events) {
        PortletEvent event;
        while (index != null) {
            event = (PortletEvent) events.get(index);
            String targetPortletKey = event.getTargetPortletKey();
            if (targetPortletKey == null
                    || targetPortletKey.equals(getTMLContext().getportlet().getportletkey())) {
                if (event.getName().equalsIgnoreCase(name)) {
                    foundEvents.add(event);
                }
            }
            index = (Long) events.nextKey(index);

        }
    }

    return foundEvents;

}
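
The loop above walks the event queue in insertion order with firstKey() and nextKey(), using get(Object key) to fetch each event. Below is a minimal standalone sketch of that traversal pattern, with hypothetical keys and values (assuming commons-collections 3.x).

import org.apache.commons.collections.map.LinkedMap;

public class LinkedMapTraversalSketch {
    public static void main(String[] args) {
        // Hypothetical event queue keyed by Long indexes
        LinkedMap queue = new LinkedMap();
        queue.put(Long.valueOf(1L), "event-1");
        queue.put(Long.valueOf(2L), "event-2");
        queue.put(Long.valueOf(3L), "event-3");

        // Walk keys in insertion order; nextKey() returns null after the last key
        Long index = (Long) queue.firstKey();
        while (index != null) {
            System.out.println(index + " -> " + queue.get(index)); // get(Object key) -> value
            index = (Long) queue.nextKey(index);
        }
    }
}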

From source file:it.polito.tellmefirst.web.rest.clients.ClientEpub.java

public ArrayList<ClassifyOutput> sortByRank(HashMap<ClassifyOutput, Integer> inputList) {

    LOG.debug("[sortByRank] - BEGIN");

    ArrayList<ClassifyOutput> result = new ArrayList<>();
    LinkedMap apacheMap = new LinkedMap(inputList);
    for (int i = 0; i < apacheMap.size() - 1; i++) {
        TreeMap<Float, ClassifyOutput> treeMap = new TreeMap<>(Collections.reverseOrder());
        do {
            i++;
            treeMap.put(Float.valueOf(((ClassifyOutput) apacheMap.get(i - 1)).getScore()),
                    (ClassifyOutput) apacheMap.get(i - 1));
        } while (i < apacheMap.size() && apacheMap.getValue(i).equals(apacheMap.getValue(i - 1)));
        i--;
        for (Float score : treeMap.keySet()) {
            result.add(treeMap.get(score));
        }
    }

    LOG.debug("[sortByRank] - END");
    return result;
}

From source file:de.innovationgate.wgpublisher.webtml.portlet.TMLPortlet.java

public void prepareEventProcessing(Base tag) throws WGAPIException {

    TMLPortletState sessionContext = getState();
    LinkedMap list = TMLPortlet.getFiredEventsQueue(tag.getPageContext().getSession());

    // Look if the event queue proceeded since the last processed event
    if (list.size() > 0) {
        PortletEvent lastEvent = (PortletEvent) list.get(list.lastKey());
        if (lastEvent != null) {
            if (lastEvent.getIndex() > sessionContext.getLastProcessedEventIndex()) {

                // Find the start index for processing new events
                Long startIndex;
                Long lastProcessedIndex = new Long(sessionContext.getLastProcessedEventIndex());
                if (list.containsKey(lastProcessedIndex)) {
                    startIndex = (Long) list.nextKey(lastProcessedIndex);
                } else {
                    startIndex = (Long) list.firstKey();
                }

                // Set start index as WebTML option
                tag.getStatus().setOption(Base.OPTION_PORTLET_EVENT_STARTINDEX,
                        new Long(sessionContext.getLastProcessedEventIndex()), TMLOption.SCOPE_GLOBAL);

                // Update last processed event index to be the newest event's index
                sessionContext.setLastProcessedEventIndex(lastEvent.getIndex());
            }
        }
    }
}

From source file:edu.isi.pfindr.learn.util.PairsFileIO.java

public void readDistinctElementsFromPairsAddClass(String pairsFilepath) {
    //readDistinctElementsIntoList
    List<Object> distinctElements = readDistinctElementsIntoList(pairsFilepath);
    System.out.println("Size of distinctElements" + distinctElements.size());
    for (int i = 0; i < distinctElements.size(); i++) {
        System.out.println("distinctElements " + i + " " + distinctElements.get(i));
    }

    //get class for those distinct elements from original cohort file
    String originalFile = "data/cohort1/bio_nlp/cohort1_s.txt";
    BufferedReader br = null;
    String thisLine;
    String[] lineArray;
    LinkedMap originalMap = new LinkedMap();
    BufferedWriter distinctPriorityPairsWriter = null;

    try {
        br = new BufferedReader(new FileReader(originalFile));
        while ((thisLine = br.readLine()) != null) {
            thisLine = thisLine.trim();
            if (thisLine.equals(""))
                continue;

            lineArray = thisLine.split("\t");
            originalMap.put(lineArray[3], lineArray[1]);
        }

        //write distinct elements with class to an output file
        StringBuffer outfileBuffer = new StringBuffer();
        for (int i = 0; i < distinctElements.size(); i++)
            outfileBuffer.append(distinctElements.get(i)).append("\t")
                    .append(originalMap.get(distinctElements.get(i)) + "\n");

        distinctPriorityPairsWriter = new BufferedWriter(
                new FileWriter(pairsFilepath.split("\\.")[0] + "_distinct_with_class.txt"));

        distinctPriorityPairsWriter.append(outfileBuffer.toString());
        outfileBuffer.setLength(0);
        distinctPriorityPairsWriter.flush();

    } catch (IOException io) {
        try {
            if (br != null)
                br.close();
            io.printStackTrace();
        } catch (IOException e) {
            System.out.println("Problem occured while closing output stream " + br);
            e.printStackTrace();
        }
    } catch (Exception e) {
        e.printStackTrace();
    }

}

From source file:edu.isi.pfindr.learn.util.PairsFileIO.java

public void filterPairsThatExist(String inputFilePath1, String inputFilePath2) {
    //eg. testdata(the data to check), traindata(original data)

    //Read the files
    List<String> phenotypeList1 = new ArrayList<String>();
    List<String> phenotypeList2 = new ArrayList<String>(); //sure pairs
    LinkedMap surePairsAdjacencyMap = new LinkedMap();
    try {
        phenotypeList1 = FileUtils.readLines(new File(inputFilePath1));
        phenotypeList2 = FileUtils.readLines(new File(inputFilePath2));

        String[] lineArray;
        List<String> resultList = new ArrayList<String>();
        List<String> surePairsMapValue = null;

        System.out.println(phenotypeList2.size());
        //construct a map of phenotype and its neighbors for sure-pairs
        for (int i = 0; i < phenotypeList2.size(); i++) {
            lineArray = phenotypeList2.get(i).split("\t");
            surePairsMapValue = new ArrayList<String>();

            //if the first value is existing in the map, get the second value
            if (surePairsAdjacencyMap.containsKey(lineArray[0])) {
                surePairsMapValue = (List<String>) surePairsAdjacencyMap.get(lineArray[0]);
            }
            //System.out.println("SurePairsMapValueSize " + surePairsMapValue.size());
            //if the value does not already contain the second, add the string and add it back to the map
            if (!surePairsMapValue.contains(lineArray[1]))
                surePairsMapValue.add(lineArray[1]);
            surePairsAdjacencyMap.put(lineArray[0], surePairsMapValue);

            //In the same manner, update the adjacency of the second string
            surePairsMapValue = new ArrayList<String>();
            if (surePairsAdjacencyMap.containsKey(lineArray[1])) {
                surePairsMapValue = (List<String>) surePairsAdjacencyMap.get(lineArray[1]);
            }
            if (!surePairsMapValue.contains(lineArray[0]))
                surePairsMapValue.add(lineArray[0]);
            surePairsAdjacencyMap.put(lineArray[1], surePairsMapValue);
        }

        List valueList = null;
        for (int i = 0; i < surePairsAdjacencyMap.size(); i++) {
            System.out.println("Key : " + surePairsAdjacencyMap.get(i) + " Value : "
                    + ((List) surePairsAdjacencyMap.get(surePairsAdjacencyMap.get(i))).size());
            /*valueList = (List)surePairsAdjacencyMap.get(surePairsAdjacencyMap.get(i));
              for(int j =0; j<valueList.size(); j++)
                 System.out.println("Value :" + valueList.get(j) );
              //break;*/
        }

        //Now parse the new pairs file, and check if the pairs already exists in the sure pairs map
        boolean existsSurePairs = false;
        System.out.println(phenotypeList1.size());
        surePairsMapValue = new ArrayList<String>();
        for (int j = 0; j < phenotypeList1.size(); j++) {

            lineArray = phenotypeList1.get(j).split("\t");
            if (surePairsAdjacencyMap.containsKey(lineArray[0])) {
                surePairsMapValue = (List) surePairsAdjacencyMap.get(lineArray[0]);
                if (surePairsMapValue.contains(lineArray[1])) {
                    existsSurePairs = true;
                }
            } else if (surePairsAdjacencyMap.containsKey(lineArray[1])) {
                surePairsMapValue = (List) surePairsAdjacencyMap.get(lineArray[1]);
                if (surePairsMapValue.contains(lineArray[0])) {
                    existsSurePairs = true;
                }
            }

            if (!existsSurePairs) //if it does not exist in surepairs, then write to output file
                resultList.add(String.format("%s\t%s\t%s", lineArray[0], lineArray[1], lineArray[2]));
            existsSurePairs = false;
        }
        String resultFilePath = inputFilePath1.split("\\.")[0] + "_filtered.txt";
        FileUtils.writeLines(new File(resultFilePath), resultList);
    } catch (IOException ioe) {
        ioe.printStackTrace();
    }
}

From source file:net.rptools.maptool.client.ui.MapToolFrame.java

private Timer newChatTimer() {
    // Set up the Chat timer to listen for changes
    Timer tm = new Timer(500, new ActionListener() {
        public void actionPerformed(ActionEvent ae) {
            long currentTime = System.currentTimeMillis();
            LinkedMap chatTimers = chatTyperTimers.getChatTypers();
            List<String> removeThese = new ArrayList<String>(chatTimers.size());

            @SuppressWarnings("unchecked")
            Set<String> playerTimers = chatTimers.keySet();
            for (String player : playerTimers) {
                long playerTime = (Long) chatTimers.get(player);
                if (currentTime - playerTime >= (chatNotifyDuration * 1000)) {
                    // set up a temp place and remove them after the loop
                    removeThese.add(player);
                }
            }
            for (String remove : removeThese) {
                chatTyperTimers.removeChatTyper(remove);
            }
        }
    });
    tm.start();
    return tm;
}

From source file:edu.isi.pfindr.learn.util.PairsFileIO.java

private void getGoldClassForPairs(String inputFilePath) {

    LinkedMap originalTestPairsClassMap = new LinkedMap();
    String originalTestPairsFile = "data/cohort1/bio_nlp/cohort1_s_test_pairs.txt";
    BufferedReader br = null;
    String[] lineArray;
    String thisLine;
    try {
        br = new BufferedReader(new FileReader(originalTestPairsFile));
        while ((thisLine = br.readLine()) != null) {
            thisLine = thisLine.trim();
            if (thisLine.equals(""))
                continue;

            lineArray = thisLine.split("\t");
            originalTestPairsClassMap.put(lineArray[0] + "\t" + lineArray[1], lineArray[2]);
        }
    } catch (IOException io) {
        try {
            if (br != null)
                br.close();
            io.printStackTrace();
        } catch (IOException e) {
            System.out.println("Problem occured while closing output stream " + br);
            e.printStackTrace();
        }
    } catch (Exception e) {
        e.printStackTrace();
    }

    List<String> phenotypeList = null;
    StringBuffer resultFileBuffer = new StringBuffer();

    //Read the pairs file, and write the pairs with class from actual test file
    try {
        BufferedWriter resultPairsWriter = new BufferedWriter(
                new FileWriter(inputFilePath.split("\\.")[0] + "_with_gold_class.txt"));
        phenotypeList = FileUtils.readLines(new File(inputFilePath));

        for (int i = 0; i < phenotypeList.size(); i++) {

            lineArray = phenotypeList.get(i).split("\t");
            resultFileBuffer.append(lineArray[0]).append("\t").append(lineArray[1]).append("\t")
                    .append(originalTestPairsClassMap.get(lineArray[0] + "\t" + lineArray[1])).append("\n");
        }

        resultPairsWriter.append(resultFileBuffer.toString());
        resultFileBuffer.setLength(0);
        resultPairsWriter.flush();

    } catch (IOException e) {
        System.out.println("Error while reading/writing file with pairs" + e.getMessage());
        e.printStackTrace();
    } catch (Exception e) {
        e.printStackTrace();
    }

}

From source file:de.innovationgate.webgate.api.jdbc.WGDatabaseImpl.java

public List<WGUpdateLog> getUpdateLogs(Comparable fromRevision) throws WGAPIException {

    try {
        Iterator logEntries;
        if (_ddlVersion >= WGDatabase.CSVERSION_WGA5) {
            Query logEntriesQ = getSession().createQuery(
                    "from de.innovationgate.webgate.api.jdbc.LogEntry as logentry where id > :start order by id asc");
            logEntriesQ.setLong("start", ((Long) fromRevision).longValue());
            logEntries = logEntriesQ.iterate();

        } else {
            Date cutoff = (Date) fromRevision;
            Query logEntriesQ = getSession().createQuery(
                    "from de.innovationgate.webgate.api.jdbc.LogEntry as logentry where logtime >= :start order by logtime asc");
            logEntriesQ.setTimestamp("start", new java.sql.Timestamp(cutoff.getTime()));
            logEntries = logEntriesQ.iterate();
        }

        List wgLogs = new ArrayList();
        LinkedMap wgLogsByTarget = new LinkedMap();
        Map conflictTargets = new HashMap();

        LogEntry entry;

        // First pass: Create update logs
        while (logEntries.hasNext()) {
            entry = (LogEntry) logEntries.next();
            WGUpdateLog newLog = null;
            WGUpdateLog oldLog = null;
            Date currentTime = null;
            if (entry.getTarget() != null && !entry.getTarget().equals("#UNKNOWN#")) {
                newLog = readUpdateLog(entry);
                wgLogs.add(newLog);

                List logsList = (List) wgLogsByTarget.get(entry.getTarget());
                if (logsList == null) {
                    logsList = new ArrayList();
                    wgLogsByTarget.put(entry.getTarget(), logsList);
                }
                logsList.add(newLog);
            }
        }

        // Second pass for CS version < 5 to workaround some weaknesses of the CS3/4 history log
        if (_ddlVersion < WGDatabase.CSVERSION_WGA5) {

            // Determine conflicting log entries, where update and delete is done on the same time and the same document
            Iterator wgLogsByTargetIt = wgLogsByTarget.values().iterator();
            while (wgLogsByTargetIt.hasNext()) {
                List logs = (List) wgLogsByTargetIt.next();
                WGUtils.sortByProperty(logs, "date");
                Iterator logsIt = logs.iterator();
                Date lastTime = null;
                List<WGUpdateLog> logsAtSameTime = new ArrayList();
                while (logsIt.hasNext()) {
                    WGUpdateLog log = (WGUpdateLog) logsIt.next();
                    if (log.getDate().equals(lastTime)) {
                        logsAtSameTime.add(log);
                    } else {
                        resolveLogConflicts(wgLogs, logsAtSameTime);
                        logsAtSameTime.clear();
                    }
                    lastTime = log.getDate();
                }
            }

            // Order logentries that have the same time in an order that assures dependency documents are created before their dependent documents
            Collections.sort(wgLogs, new DocumentDependencyComparator());

        }

        return wgLogs;
    } catch (HibernateException e) {
        throw new WGBackendException("Unable to retrieve updated documents", e);
    }

}