Example usage for org.apache.commons.collections.map.LinkedMap size

List of usage examples for org.apache.commons.collections.map.LinkedMap size

Introduction

On this page you can find example usage for org.apache.commons.collections.map.LinkedMap size().

Prototype

public int size() 

Document

Gets the size of the map.
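
As a quick orientation before the project examples, here is a minimal self-contained sketch (assuming commons-collections 3.x on the classpath; LinkedMap there is not generic, hence the raw type). It shows size() tracking the number of mappings as entries are added and removed, and firstKey() pairing with it for oldest-first eviction:

import org.apache.commons.collections.map.LinkedMap;

public class LinkedMapSizeDemo {

    public static void main(String[] args) {
        LinkedMap map = new LinkedMap();
        System.out.println(map.size());   // 0
        map.put("a", Integer.valueOf(1));
        map.put("b", Integer.valueOf(2));
        System.out.println(map.size());   // 2
        map.remove(map.firstKey());       // evicts "a", the oldest entry
        System.out.println(map.size());   // 1
    }
}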

Usage

From source file: de.innovationgate.wgpublisher.webtml.portlet.TMLPortlet.java

protected static void addEventToQueue(PortletEvent event, HttpSession session) {

    LinkedMap events = getFiredEventsQueue(session);

    synchronized (events) {
        event.retrieveIndex();
        events.put(new Long(event.getIndex()), event);
        // Evict the oldest entries until the queue is back under its size cap
        while (events.size() > EVENTQUEUE_MAX_SIZE) {
            events.remove(events.firstKey());
        }
    }

}
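
This example uses size() to cap the event queue: after each insertion, entries are evicted from firstKey() (the oldest, since LinkedMap preserves insertion order) until the queue is back under EVENTQUEUE_MAX_SIZE.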

From source file: de.innovationgate.wgpublisher.webtml.EventScript.java

/**
 * Searches the fired events queue for events of a name, beginning at a specified index
 * @param name The event name to search for
 * @param index The start index in the queue
 * @return a list of found events
 * @throws WGAPIException 
 */
private List<PortletEvent> findEventsOfName(String name, Long index) throws WGAPIException {

    List<PortletEvent> foundEvents = new ArrayList<PortletEvent>();
    HttpSession session = getPageContext().getSession();
    LinkedMap events = TMLPortlet.getFiredEventsQueue(session);

    if (events.size() == 0) {
        return foundEvents;
    }

    synchronized (events) {
        // Find the start index. This is either the index after the last processed index, or - if the last
        // processed index is no longer in the queue - the first index in the queue.
        if (events.containsKey(index)) {
            index = (Long) events.nextKey(index);
        } else {
            index = (Long) events.firstKey();
        }

        PortletEvent event;
        while (index != null) {
            event = (PortletEvent) events.get(index);
            String targetPortletKey = event.getTargetPortletKey();
            if (targetPortletKey == null
                    || targetPortletKey.equals(getTMLContext().getportlet().getportletkey())) {
                if (event.getName().equalsIgnoreCase(name)) {
                    foundEvents.add(event);
                }
            }
            index = (Long) events.nextKey(index);

        }
    }

    return foundEvents;

}
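
Here size() serves as a cheap emptiness check: an empty queue returns early instead of entering the synchronized firstKey()/nextKey() traversal.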

From source file: com.tilab.ca.sse.core.classify.Classifier.java

private List<ScoreDoc> sortByRank(Map<ScoreDoc, Integer> inputList) {
    LOG.debug("[sortByRank] - BEGIN");
    List<ScoreDoc> result = new ArrayList<>();
    LinkedMap apacheMap = new LinkedMap(inputList);
    // Scan the map by index, grouping consecutive entries that share the same rank value
    for (int i = 0; i < apacheMap.size() - 1; i++) {
        // Each group is re-ordered by descending score before being appended to the result
        Map<Float, ScoreDoc> treeMap = new TreeMap<>(Collections.reverseOrder());
        do {
            i++;
            treeMap.put(((ScoreDoc) apacheMap.get(i - 1)).score, (ScoreDoc) apacheMap.get(i - 1));
        } while (i < apacheMap.size() && apacheMap.getValue(i).equals(apacheMap.getValue(i - 1)));
        i--;
        treeMap.keySet().stream().forEach((score) -> {
            result.add(treeMap.get(score));
        });
    }
    LOG.debug("[sortByRank] - END");
    return result;
}
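
This scan relies on LinkedMap's index-based accessors: get(int) returns the key at an index and getValue(int) the corresponding value, with size() bounding both loops.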

From source file: it.polito.tellmefirst.web.rest.clients.ClientEpub.java

public ArrayList<ClassifyOutput> sortByRank(HashMap<ClassifyOutput, Integer> inputList) {

    LOG.debug("[sortByRank] - BEGIN");

    ArrayList<ClassifyOutput> result = new ArrayList<>();
    LinkedMap apacheMap = new LinkedMap(inputList);
    // Same index-based scan as above: group consecutive entries that share the same rank value
    for (int i = 0; i < apacheMap.size() - 1; i++) {
        TreeMap<Float, ClassifyOutput> treeMap = new TreeMap<>(Collections.reverseOrder());
        do {
            i++;
            treeMap.put(Float.valueOf(((ClassifyOutput) apacheMap.get(i - 1)).getScore()),
                    (ClassifyOutput) apacheMap.get(i - 1));
        } while (i < apacheMap.size() && apacheMap.getValue(i).equals(apacheMap.getValue(i - 1)));
        i--;
        for (Float score : treeMap.keySet()) {
            result.add(treeMap.get(score));
        }
    }

    LOG.debug("[sortByRank] - END");
    return result;
}
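
Same rank-grouping scan as the previous example, with ClassifyOutput keys in place of ScoreDoc.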

From source file: de.innovationgate.wgpublisher.webtml.portlet.TMLPortlet.java

public void prepareEventProcessing(Base tag) throws WGAPIException {

    TMLPortletState sessionContext = getState();
    LinkedMap list = TMLPortlet.getFiredEventsQueue(tag.getPageContext().getSession());

    // Look if the event queue proceeded since the last processed event
    if (list.size() > 0) {
        PortletEvent lastEvent = (PortletEvent) list.get(list.lastKey());
        if (lastEvent != null) {
            if (lastEvent.getIndex() > sessionContext.getLastProcessedEventIndex()) {

                // Find the start index for processing new events
                Long startIndex;
                Long lastProcessedIndex = new Long(sessionContext.getLastProcessedEventIndex());
                if (list.containsKey(lastProcessedIndex)) {
                    startIndex = (Long) list.nextKey(lastProcessedIndex);
                } else {
                    startIndex = (Long) list.firstKey();
                }

                // Set start index as WebTML option
                tag.getStatus().setOption(Base.OPTION_PORTLET_EVENT_STARTINDEX,
                        new Long(sessionContext.getLastProcessedEventIndex()), TMLOption.SCOPE_GLOBAL);

                // Update last processed event index to be the newest event's index
                sessionContext.setLastProcessedEventIndex(lastEvent.getIndex());
            }
        }
    }
}
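
The size() > 0 guard is what makes the subsequent list.get(list.lastKey()) safe: lastKey() throws NoSuchElementException on an empty LinkedMap.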

From source file: net.rptools.maptool.client.ui.ChatTypingNotification.java

/**
 * This component is only made visible when there are notifications to be displayed. That means the first couple of
 * IF statements in this method are redundant since paintComponent() will not be called unless the component is
 * visible, and it will only be visible when there are notifications...
 */
@Override
protected void paintComponent(Graphics g) {
    //      System.out.println("Chat panel is painting itself...");
    if (AppPreferences.getTypingNotificationDuration() == 0) {
        return;
    }
    LinkedMap chatTypers = MapTool.getFrame().getChatNotificationTimers().getChatTypers();
    if (chatTypers == null || chatTypers.isEmpty()) {
        return;
    }
    Boolean showBackground = AppPreferences.getChatNotificationShowBackground();

    Graphics2D statsG = (Graphics2D) g.create();

    Font boldFont = AppStyle.labelFont.deriveFont(Font.BOLD);
    Font font = AppStyle.labelFont;
    FontMetrics valueFM = g.getFontMetrics(font);
    FontMetrics keyFM = g.getFontMetrics(boldFont);

    int PADDING7 = 7;
    int PADDING3 = 3;
    int PADDING2 = 2;

    BufferedImage img = AppStyle.panelTexture;
    int rowHeight = Math.max(valueFM.getHeight(), keyFM.getHeight());

    setBorder(null);
    int width = AppStyle.miniMapBorder.getRightMargin() + AppStyle.miniMapBorder.getLeftMargin();
    int height = getHeight() - PADDING2 + AppStyle.miniMapBorder.getTopMargin()
            + AppStyle.miniMapBorder.getBottomMargin();

    statsG.setFont(font);
    SwingUtil.useAntiAliasing(statsG);
    Rectangle bounds = new Rectangle(AppStyle.miniMapBorder.getLeftMargin(),
            height - getHeight() - AppStyle.miniMapBorder.getTopMargin(), getWidth() - width,
            getHeight() - AppStyle.miniMapBorder.getBottomMargin() - AppStyle.miniMapBorder.getTopMargin()
                    + PADDING2);

    int y = bounds.y + rowHeight;
    rowHeight = Math.max(rowHeight, AppStyle.chatImage.getHeight());

    setSize(getWidth(), ((chatTypers.size() * (PADDING3 + rowHeight)) + AppStyle.miniMapBorder.getTopMargin()
            + AppStyle.miniMapBorder.getBottomMargin()));

    if (showBackground) {
        g.drawImage(img, 0, 0, getWidth(), getHeight() + PADDING7, this);
        AppStyle.miniMapBorder.paintAround(statsG, bounds);
    }
    Rectangle rightRow = new Rectangle(AppStyle.miniMapBorder.getLeftMargin() + PADDING7,
            AppStyle.miniMapBorder.getTopMargin() + PADDING7, AppStyle.chatImage.getWidth(),
            AppStyle.chatImage.getHeight());

    Set<?> keySet = chatTypers.keySet();
    @SuppressWarnings("unchecked")
    Set<String> playerTimers = (Set<String>) keySet;
    for (String playerNamer : playerTimers) {
        if (showBackground) {
            statsG.setColor(new Color(249, 241, 230, 140));
            statsG.fillRect(bounds.x + PADDING3, y - keyFM.getAscent(),
                    (bounds.width - PADDING7 / 2) - PADDING3, rowHeight);
            statsG.setColor(new Color(175, 163, 149));
            statsG.drawRect(bounds.x + PADDING3, y - keyFM.getAscent(),
                    (bounds.width - PADDING7 / 2) - PADDING3, rowHeight);
        }
        g.drawImage(AppStyle.chatImage, bounds.x + 5, y - keyFM.getAscent(), (int) rightRow.getWidth(),
                (int) rightRow.getHeight(), this);

        // Values
        statsG.setColor(MapTool.getFrame().getChatTypingLabelColor());
        statsG.setFont(boldFont);
        statsG.drawString(I18N.getText("msg.commandPanel.liveTyping", playerNamer),
                bounds.x + AppStyle.chatImage.getWidth() + PADDING7 * 2, y + 5);

        y += PADDING2 + rowHeight;
    }
    if (showBackground) {
        AppStyle.shadowBorder.paintWithin(statsG, bounds);
    } else {
        setOpaque(false);
    }
}
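
In this paint routine, chatTypers.size() drives the component's height: the panel is resized to one row per typing player before the rows are drawn.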

From source file: net.rptools.maptool.client.ui.MapToolFrame.java

private Timer newChatTimer() {
    // Set up the Chat timer to listen for changes
    Timer tm = new Timer(500, new ActionListener() {
        public void actionPerformed(ActionEvent ae) {
            long currentTime = System.currentTimeMillis();
            LinkedMap chatTimers = chatTyperTimers.getChatTypers();
            List<String> removeThese = new ArrayList<String>(chatTimers.size());

            @SuppressWarnings("unchecked")
            Set<String> playerTimers = chatTimers.keySet();
            for (String player : playerTimers) {
                long playerTime = (Long) chatTimers.get(player);
                if (currentTime - playerTime >= (chatNotifyDuration * 1000)) {
                    // set up a temp place and remove them after the loop
                    removeThese.add(player);
                }
            }
            for (String remove : removeThese) {
                chatTyperTimers.removeChatTyper(remove);
            }
        }
    });
    tm.start();
    return tm;
}
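
Here chatTimers.size() simply pre-sizes the removal list, since in the worst case every typer may have timed out in the same tick.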

From source file: edu.isi.pfindr.learn.util.PairsFileIO.java

public void filterPairsThatExist(String inputFilePath1, String inputFilePath2) {
    //e.g. test data (the data to check) and train data (the original data)

    //Read the files
    List<String> phenotypeList1 = new ArrayList<String>();
    List<String> phenotypeList2 = new ArrayList<String>(); //sure pairs
    LinkedMap surePairsAdjacencyMap = new LinkedMap();
    try {
        phenotypeList1 = FileUtils.readLines(new File(inputFilePath1));
        phenotypeList2 = FileUtils.readLines(new File(inputFilePath2));

        String[] lineArray;
        List<String> resultList = new ArrayList<String>();
        List<String> surePairsMapValue = null;

        System.out.println(phenotypeList2.size());
        //construct a map of phenotype and its neighbors for sure-pairs
        for (int i = 0; i < phenotypeList2.size(); i++) {
            lineArray = phenotypeList2.get(i).split("\t");
            surePairsMapValue = new ArrayList<String>();

            //if the first string already exists in the map, fetch its current neighbor list
            if (surePairsAdjacencyMap.containsKey(lineArray[0])) {
                surePairsMapValue = (List<String>) surePairsAdjacencyMap.get(lineArray[0]);
            }
            //System.out.println("SurePairsMapValueSize " + surePairsMapValue.size());
            //if the neighbor list does not already contain the second string, add it and put the list back into the map
            if (!surePairsMapValue.contains(lineArray[1]))
                surePairsMapValue.add(lineArray[1]);
            surePairsAdjacencyMap.put(lineArray[0], surePairsMapValue);

            //In the same manner, update the adjacency of the second string
            surePairsMapValue = new ArrayList<String>();
            if (surePairsAdjacencyMap.containsKey(lineArray[1])) {
                surePairsMapValue = (List<String>) surePairsAdjacencyMap.get(lineArray[1]);
            }
            if (!surePairsMapValue.contains(lineArray[0]))
                surePairsMapValue.add(lineArray[0]);
            surePairsAdjacencyMap.put(lineArray[1], surePairsMapValue);
        }

        List valueList = null;
        for (int i = 0; i < surePairsAdjacencyMap.size(); i++) {
            System.out.println("Key : " + surePairsAdjacencyMap.get(i) + " Value : "
                    + ((List) surePairsAdjacencyMap.get(surePairsAdjacencyMap.get(i))).size());
            /*valueList = (List)surePairsAdjacencyMap.get(surePairsAdjacencyMap.get(i));
              for(int j =0; j<valueList.size(); j++)
                 System.out.println("Value :" + valueList.get(j) );
              //break;*/
        }

        //Now parse the new pairs file, and check if the pairs already exists in the sure pairs map
        boolean existsSurePairs = false;
        System.out.println(phenotypeList1.size());
        surePairsMapValue = new ArrayList<String>();
        for (int j = 0; j < phenotypeList1.size(); j++) {

            lineArray = phenotypeList1.get(j).split("\t");
            if (surePairsAdjacencyMap.containsKey(lineArray[0])) {
                surePairsMapValue = (List) surePairsAdjacencyMap.get(lineArray[0]);
                if (surePairsMapValue.contains(lineArray[1])) {
                    existsSurePairs = true;
                }
            } else if (surePairsAdjacencyMap.containsKey(lineArray[1])) {
                surePairsMapValue = (List) surePairsAdjacencyMap.get(lineArray[1]);
                if (surePairsMapValue.contains(lineArray[0])) {
                    existsSurePairs = true;
                }
            }

            if (!existsSurePairs) //if it does not exist in surepairs, then write to output file
                resultList.add(String.format("%s\t%s\t%s", lineArray[0], lineArray[1], lineArray[2]));
            existsSurePairs = false;
        }
        String resultFilePath = inputFilePath1.split("\\.")[0] + "_filtered.txt";
        FileUtils.writeLines(new File(resultFilePath), resultList);
    } catch (IOException ioe) {
        ioe.printStackTrace();
    }
}
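
In the diagnostic loop, surePairsAdjacencyMap.size() bounds an index-based scan in which get(i) yields the i-th key and a nested get(...) call looks up that key's adjacency list.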

From source file: org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumerBaseTest.java

@Test
@SuppressWarnings("unchecked")
public void testSnapshotStateWithCommitOnCheckpointsEnabled() throws Exception {

    // --------------------------------------------------------------------
    //   prepare fake states
    // --------------------------------------------------------------------

    final HashMap<KafkaTopicPartition, Long> state1 = new HashMap<>();
    state1.put(new KafkaTopicPartition("abc", 13), 16768L);
    state1.put(new KafkaTopicPartition("def", 7), 987654321L);

    final HashMap<KafkaTopicPartition, Long> state2 = new HashMap<>();
    state2.put(new KafkaTopicPartition("abc", 13), 16770L);
    state2.put(new KafkaTopicPartition("def", 7), 987654329L);

    final HashMap<KafkaTopicPartition, Long> state3 = new HashMap<>();
    state3.put(new KafkaTopicPartition("abc", 13), 16780L);
    state3.put(new KafkaTopicPartition("def", 7), 987654377L);

    // --------------------------------------------------------------------

    final AbstractFetcher<String, ?> fetcher = mock(AbstractFetcher.class);
    when(fetcher.snapshotCurrentState()).thenReturn(state1, state2, state3);

    final LinkedMap pendingOffsetsToCommit = new LinkedMap();

    FlinkKafkaConsumerBase<String> consumer = getConsumer(fetcher, pendingOffsetsToCommit, true);
    StreamingRuntimeContext mockRuntimeContext = mock(StreamingRuntimeContext.class);
    when(mockRuntimeContext.isCheckpointingEnabled()).thenReturn(true); // enable checkpointing
    consumer.setRuntimeContext(mockRuntimeContext);

    assertEquals(0, pendingOffsetsToCommit.size());

    OperatorStateStore backend = mock(OperatorStateStore.class);

    TestingListState<Serializable> listState = new TestingListState<>();

    when(backend.getSerializableListState(Matchers.any(String.class))).thenReturn(listState);

    StateInitializationContext initializationContext = mock(StateInitializationContext.class);

    when(initializationContext.getOperatorStateStore()).thenReturn(backend);
    when(initializationContext.isRestored()).thenReturn(false, true, true, true);

    consumer.initializeState(initializationContext);

    consumer.open(new Configuration());

    // checkpoint 1
    consumer.snapshotState(new StateSnapshotContextSynchronousImpl(138, 138));

    HashMap<KafkaTopicPartition, Long> snapshot1 = new HashMap<>();

    for (Serializable serializable : listState.get()) {
        Tuple2<KafkaTopicPartition, Long> kafkaTopicPartitionLongTuple2 = (Tuple2<KafkaTopicPartition, Long>) serializable;
        snapshot1.put(kafkaTopicPartitionLongTuple2.f0, kafkaTopicPartitionLongTuple2.f1);
    }

    assertEquals(state1, snapshot1);
    assertEquals(1, pendingOffsetsToCommit.size());
    assertEquals(state1, pendingOffsetsToCommit.get(138L));

    // checkpoint 2
    consumer.snapshotState(new StateSnapshotContextSynchronousImpl(140, 140));

    HashMap<KafkaTopicPartition, Long> snapshot2 = new HashMap<>();

    for (Serializable serializable : listState.get()) {
        Tuple2<KafkaTopicPartition, Long> kafkaTopicPartitionLongTuple2 = (Tuple2<KafkaTopicPartition, Long>) serializable;
        snapshot2.put(kafkaTopicPartitionLongTuple2.f0, kafkaTopicPartitionLongTuple2.f1);
    }

    assertEquals(state2, snapshot2);
    assertEquals(2, pendingOffsetsToCommit.size());
    assertEquals(state2, pendingOffsetsToCommit.get(140L));

    // ack checkpoint 1
    consumer.notifyCheckpointComplete(138L);
    assertEquals(1, pendingOffsetsToCommit.size());
    assertTrue(pendingOffsetsToCommit.containsKey(140L));

    // checkpoint 3
    consumer.snapshotState(new StateSnapshotContextSynchronousImpl(141, 141));

    HashMap<KafkaTopicPartition, Long> snapshot3 = new HashMap<>();

    for (Serializable serializable : listState.get()) {
        Tuple2<KafkaTopicPartition, Long> kafkaTopicPartitionLongTuple2 = (Tuple2<KafkaTopicPartition, Long>) serializable;
        snapshot3.put(kafkaTopicPartitionLongTuple2.f0, kafkaTopicPartitionLongTuple2.f1);
    }

    assertEquals(state3, snapshot3);
    assertEquals(2, pendingOffsetsToCommit.size());
    assertEquals(state3, pendingOffsetsToCommit.get(141L));

    // ack checkpoint 3, subsumes number 2
    consumer.notifyCheckpointComplete(141L);
    assertEquals(0, pendingOffsetsToCommit.size());

    consumer.notifyCheckpointComplete(666); // invalid checkpoint
    assertEquals(0, pendingOffsetsToCommit.size());

    OperatorStateStore operatorStateStore = mock(OperatorStateStore.class);
    listState = new TestingListState<>();
    when(operatorStateStore.getOperatorState(Matchers.any(ListStateDescriptor.class))).thenReturn(listState);

    // create 500 snapshots
    for (int i = 100; i < 600; i++) {
        consumer.snapshotState(new StateSnapshotContextSynchronousImpl(i, i));
        listState.clear();
    }
    assertEquals(FlinkKafkaConsumerBase.MAX_NUM_PENDING_CHECKPOINTS, pendingOffsetsToCommit.size());

    // commit only the second-to-last checkpoint
    consumer.notifyCheckpointComplete(598);
    assertEquals(1, pendingOffsetsToCommit.size());

    // access invalid checkpoint
    consumer.notifyCheckpointComplete(590);

    // and the last
    consumer.notifyCheckpointComplete(599);
    assertEquals(0, pendingOffsetsToCommit.size());
}
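
Throughout this test, pendingOffsetsToCommit.size() asserts how many checkpoints still await acknowledgement; the 500-snapshot loop shows the map being capped at FlinkKafkaConsumerBase.MAX_NUM_PENDING_CHECKPOINTS.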

From source file: org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumerBaseTest.java

@Test
@SuppressWarnings("unchecked")
public void testSnapshotStateWithCommitOnCheckpointsDisabled() throws Exception {
    // --------------------------------------------------------------------
    //   prepare fake states
    // --------------------------------------------------------------------

    final HashMap<KafkaTopicPartition, Long> state1 = new HashMap<>();
    state1.put(new KafkaTopicPartition("abc", 13), 16768L);
    state1.put(new KafkaTopicPartition("def", 7), 987654321L);

    final HashMap<KafkaTopicPartition, Long> state2 = new HashMap<>();
    state2.put(new KafkaTopicPartition("abc", 13), 16770L);
    state2.put(new KafkaTopicPartition("def", 7), 987654329L);

    final HashMap<KafkaTopicPartition, Long> state3 = new HashMap<>();
    state3.put(new KafkaTopicPartition("abc", 13), 16780L);
    state3.put(new KafkaTopicPartition("def", 7), 987654377L);

    // --------------------------------------------------------------------

    final AbstractFetcher<String, ?> fetcher = mock(AbstractFetcher.class);
    when(fetcher.snapshotCurrentState()).thenReturn(state1, state2, state3);

    final LinkedMap pendingOffsetsToCommit = new LinkedMap();

    FlinkKafkaConsumerBase<String> consumer = getConsumer(fetcher, pendingOffsetsToCommit, true);
    StreamingRuntimeContext mockRuntimeContext = mock(StreamingRuntimeContext.class);
    when(mockRuntimeContext.isCheckpointingEnabled()).thenReturn(true); // enable checkpointing
    consumer.setRuntimeContext(mockRuntimeContext);

    consumer.setCommitOffsetsOnCheckpoints(false); // disable offset committing

    assertEquals(0, pendingOffsetsToCommit.size());

    OperatorStateStore backend = mock(OperatorStateStore.class);

    TestingListState<Serializable> listState = new TestingListState<>();

    when(backend.getSerializableListState(Matchers.any(String.class))).thenReturn(listState);

    StateInitializationContext initializationContext = mock(StateInitializationContext.class);

    when(initializationContext.getOperatorStateStore()).thenReturn(backend);
    when(initializationContext.isRestored()).thenReturn(false, true, true, true);

    consumer.initializeState(initializationContext);

    consumer.open(new Configuration());

    // checkpoint 1
    consumer.snapshotState(new StateSnapshotContextSynchronousImpl(138, 138));

    HashMap<KafkaTopicPartition, Long> snapshot1 = new HashMap<>();

    for (Serializable serializable : listState.get()) {
        Tuple2<KafkaTopicPartition, Long> kafkaTopicPartitionLongTuple2 = (Tuple2<KafkaTopicPartition, Long>) serializable;
        snapshot1.put(kafkaTopicPartitionLongTuple2.f0, kafkaTopicPartitionLongTuple2.f1);
    }

    assertEquals(state1, snapshot1);
    assertEquals(0, pendingOffsetsToCommit.size()); // pending offsets to commit should not be updated

    // checkpoint 2
    consumer.snapshotState(new StateSnapshotContextSynchronousImpl(140, 140));

    HashMap<KafkaTopicPartition, Long> snapshot2 = new HashMap<>();

    for (Serializable serializable : listState.get()) {
        Tuple2<KafkaTopicPartition, Long> kafkaTopicPartitionLongTuple2 = (Tuple2<KafkaTopicPartition, Long>) serializable;
        snapshot2.put(kafkaTopicPartitionLongTuple2.f0, kafkaTopicPartitionLongTuple2.f1);
    }

    assertEquals(state2, snapshot2);
    assertEquals(0, pendingOffsetsToCommit.size()); // pending offsets to commit should not be updated

    // ack checkpoint 1
    consumer.notifyCheckpointComplete(138L);
    verify(fetcher, never()).commitInternalOffsetsToKafka(anyMap()); // no offsets should be committed

    // checkpoint 3
    consumer.snapshotState(new StateSnapshotContextSynchronousImpl(141, 141));

    HashMap<KafkaTopicPartition, Long> snapshot3 = new HashMap<>();

    for (Serializable serializable : listState.get()) {
        Tuple2<KafkaTopicPartition, Long> kafkaTopicPartitionLongTuple2 = (Tuple2<KafkaTopicPartition, Long>) serializable;
        snapshot3.put(kafkaTopicPartitionLongTuple2.f0, kafkaTopicPartitionLongTuple2.f1);
    }

    assertEquals(state3, snapshot3);
    assertEquals(0, pendingOffsetsToCommit.size()); // pending offsets to commit should not be updated

    // ack checkpoint 3, subsumes number 2
    consumer.notifyCheckpointComplete(141L);
    verify(fetcher, never()).commitInternalOffsetsToKafka(anyMap()); // no offsets should be committed

    consumer.notifyCheckpointComplete(666); // invalid checkpoint
    verify(fetcher, never()).commitInternalOffsetsToKafka(anyMap()); // no offsets should be committed

    OperatorStateStore operatorStateStore = mock(OperatorStateStore.class);
    listState = new TestingListState<>();
    when(operatorStateStore.getOperatorState(Matchers.any(ListStateDescriptor.class))).thenReturn(listState);

    // create 500 snapshots
    for (int i = 100; i < 600; i++) {
        consumer.snapshotState(new StateSnapshotContextSynchronousImpl(i, i));
        listState.clear();
    }
    assertEquals(0, pendingOffsetsToCommit.size()); // pending offsets to commit should not be updated

    // commit only the second-to-last checkpoint
    consumer.notifyCheckpointComplete(598);
    verify(fetcher, never()).commitInternalOffsetsToKafka(anyMap()); // no offsets should be committed

    // access invalid checkpoint
    consumer.notifyCheckpointComplete(590);
    verify(fetcher, never()).commitInternalOffsetsToKafka(anyMap()); // no offsets should be committed

    // and the last
    consumer.notifyCheckpointComplete(599);
    verify(fetcher, never()).commitInternalOffsetsToKafka(anyMap()); // no offsets should be committed
}
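
With offset committing disabled, the assertions invert: pendingOffsetsToCommit.size() stays at 0 across every snapshot and acknowledgement.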