Example usage for com.google.common.collect Multimap asMap

Introduction

On this page you can find example usages of com.google.common.collect Multimap.asMap(), drawn from open-source projects.

Prototype

Map<K, Collection<V>> asMap();

Document

Returns a view of this multimap as a Map from each distinct key to the nonempty collection of that key's associated values.
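
As a quick illustration of the view this method returns, here is a minimal, self-contained sketch (hypothetical class and variable names, not taken from the examples below). It shows the two properties the usages on this page rely on: values are grouped into one collection per distinct key, and the returned map is a live view backed by the multimap.

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

import java.util.Collection;
import java.util.Map;

public class MultimapAsMapDemo {
    public static void main(String[] args) {
        Multimap<String, Integer> scores = ArrayListMultimap.create();
        scores.put("alice", 1);
        scores.put("alice", 2);
        scores.put("bob", 3);

        // asMap() exposes one entry per distinct key, e.g. alice -> [1, 2]
        for (Map.Entry<String, Collection<Integer>> entry : scores.asMap().entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }

        // The map is a view, not a copy: removing from a value collection
        // writes through to the underlying multimap. Keys whose collections
        // become empty disappear from the view entirely.
        scores.asMap().get("alice").remove(Integer.valueOf(1));
        System.out.println(scores); // e.g. {alice=[2], bob=[3]} (key order not guaranteed)
    }
}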

Usage

From source file: com.sam.moca.web.console.ConsoleModel.java

/**
 * Get the cluster roles for each known node, keyed by instance URL.
 *
 * @param parameters The parameters passed on the request.
 * @return the cluster roles for each instance URL
 */
public WebResults<?> getClusterRoles(Map<String, String[]> parameters) {
    WebResults<Multimap<InstanceUrl, RoleDefinition>> results = new WebResults<Multimap<InstanceUrl, RoleDefinition>>();

    ClusterRoleManager manager = ServerUtils.globalAttribute(ClusterRoleManager.class);

    Multimap<Node, RoleDefinition> multiMap = manager.getClusterRoles();
    Map<Node, InstanceUrl> urls = _clusterAdmin.getKnownNodes();

    Multimap<InstanceUrl, RoleDefinition> urlRoleMap = HashMultimap.create();

    for (Entry<Node, Collection<RoleDefinition>> entry : multiMap.asMap().entrySet()) {
        urlRoleMap.putAll(urls.get(entry.getKey()), entry.getValue());
    }

    results.add(urlRoleMap);
    return results;
}

From source file: com.cloudant.sync.datastore.BasicDatastore.java

@Override
public Map<String, Collection<String>> revsDiff(Multimap<String, String> revisions) {
    Preconditions.checkState(this.isOpen(), "Database is closed");
    Preconditions.checkNotNull(revisions, "Input revisions must not be null");
    Multimap<String, String> missingRevs = ArrayListMultimap.create();
    // Break the potentially large multimap into smaller ones so that, for each map,
    // a single query can be used to check whether the <id, revision> pairs are in sqlDb
    List<Multimap<String, String>> batches = this.multiMapPartitions(revisions,
            SQLITE_QUERY_PLACEHOLDERS_LIMIT);
    for (Multimap<String, String> batch : batches) {
        this.revsDiffBatch(batch);
        missingRevs.putAll(batch);
    }
    return missingRevs.asMap();
}

From source file: com.facebook.presto.execution.TaskExecutorSimulator.java

public void run() throws Exception {
    Multimap<Integer, SimulationTask> tasks = Multimaps
            .synchronizedListMultimap(ArrayListMultimap.<Integer, SimulationTask>create());
    Set<ListenableFuture<?>> finishFutures = newConcurrentHashSet();
    AtomicBoolean done = new AtomicBoolean();

    long start = System.nanoTime();

    // large tasks
    for (int userId = 0; userId < 2; userId++) {
        ListenableFuture<?> future = createUser("large_" + userId, 100, taskExecutor, done, tasks);
        finishFutures.add(future);
    }

    // small tasks
    for (int userId = 0; userId < 4; userId++) {
        ListenableFuture<?> future = createUser("small_" + userId, 5, taskExecutor, done, tasks);
        finishFutures.add(future);
    }

    // tiny tasks
    for (int userId = 0; userId < 1; userId++) {
        ListenableFuture<?> future = createUser("tiny_" + userId, 1, taskExecutor, done, tasks);
        finishFutures.add(future);
    }

    // warm up
    for (int i = 0; i < 30; i++) {
        TimeUnit.MILLISECONDS.sleep(1000);
        System.out.println(taskExecutor);
    }
    tasks.clear();

    // run
    for (int i = 0; i < 60; i++) {
        TimeUnit.MILLISECONDS.sleep(1000);
        System.out.println(taskExecutor);
    }

    // capture finished tasks
    Map<Integer, Collection<SimulationTask>> middleTasks;
    synchronized (tasks) {
        middleTasks = new TreeMap<>(tasks.asMap());
    }

    // wait for finish
    done.set(true);
    Futures.allAsList(finishFutures).get(1, TimeUnit.MINUTES);

    Duration runtime = Duration.nanosSince(start).convertToMostSuccinctTimeUnit();
    synchronized (this) {
        System.out.println();
        System.out.println("Simulation finished in  " + runtime);
        System.out.println();

        for (Entry<Integer, Collection<SimulationTask>> entry : middleTasks.entrySet()) {
            Distribution durationDistribution = new Distribution();
            Distribution taskParallelismDistribution = new Distribution();

            for (SimulationTask task : entry.getValue()) {
                long taskStart = Long.MAX_VALUE;
                long taskEnd = 0;
                long totalCpuTime = 0;

                for (SimulationSplit split : task.getSplits()) {
                    taskStart = Math.min(taskStart, split.getStartNanos());
                    taskEnd = Math.max(taskEnd, split.getDoneNanos());
                    totalCpuTime += TimeUnit.MILLISECONDS.toNanos(split.getRequiredProcessMillis());
                }

                Duration taskDuration = new Duration(taskEnd - taskStart, NANOSECONDS)
                        .convertTo(TimeUnit.MILLISECONDS);
                durationDistribution.add(taskDuration.toMillis());

                double taskParallelism = 1.0 * totalCpuTime / (taskEnd - taskStart);
                taskParallelismDistribution.add((long) (taskParallelism * 100));
            }

            System.out.println("Splits " + entry.getKey() + ": Completed " + entry.getValue().size());

            Map<Double, Long> durationPercentiles = durationDistribution.getPercentiles();
            System.out.printf(
                    "   wall time ms :: p01 %4s :: p05 %4s :: p10 %4s :: p97 %4s :: p50 %4s :: p75 %4s :: p90 %4s :: p95 %4s :: p99 %4s\n",
                    durationPercentiles.get(0.01), durationPercentiles.get(0.05), durationPercentiles.get(0.10),
                    durationPercentiles.get(0.25), durationPercentiles.get(0.50), durationPercentiles.get(0.75),
                    durationPercentiles.get(0.90), durationPercentiles.get(0.95),
                    durationPercentiles.get(0.99));

            Map<Double, Long> parallelismPercentiles = taskParallelismDistribution.getPercentiles();
            System.out.printf(
                    "    parallelism :: p99 %4.2f :: p95 %4.2f :: p90 %4.2f :: p75 %4.2f :: p50 %4.2f :: p25 %4.2f :: p10 %4.2f :: p05 %4.2f :: p01 %4.2f\n",
                    parallelismPercentiles.get(0.99) / 100.0, parallelismPercentiles.get(0.95) / 100.0,
                    parallelismPercentiles.get(0.90) / 100.0, parallelismPercentiles.get(0.75) / 100.0,
                    parallelismPercentiles.get(0.50) / 100.0, parallelismPercentiles.get(0.25) / 100.0,
                    parallelismPercentiles.get(0.10) / 100.0, parallelismPercentiles.get(0.05) / 100.0,
                    parallelismPercentiles.get(0.01) / 100.0);
        }
    }
    Thread.sleep(10);
}

From source file: org.obm.icalendar.Ical4jHelper.java

private Collection<Event> addEventExceptionToDefinedParentEvent(Map<EventExtId, Event> mapEvents,
        Multimap<EventExtId, Event> mapExceptionEvents) {

    Collection<Entry<EventExtId, Collection<Event>>> mapExceptionEventsEntries = mapExceptionEvents.asMap()
            .entrySet();

    for (Entry<EventExtId, Collection<Event>> entry : mapExceptionEventsEntries) {
        Event parentEvent = mapEvents.get(entry.getKey());
        Collection<Event> eventsException = entry.getValue();
        if (parentEvent != null) {
            addOrReplaceExceptions(parentEvent.getRecurrence(), eventsException);
        } else {
            logger.warn(
                    "Drop following events exception while parsing ICS file because parent was not defined: {}",
                    eventsException);
        }
    }
    return mapEvents.values();
}

From source file: org.apache.solr.search.ExtendedDismaxQParser.java

/**
 * Adds shingled phrase queries to all the fields specified in the pf, pf2 and pf3 parameters.
 */
protected void addPhraseFieldQueries(BooleanQuery.Builder query, List<Clause> clauses,
        ExtendedDismaxConfiguration config) throws SyntaxError {

    // sloppy phrase queries for proximity
    List<FieldParams> allPhraseFields = config.getAllPhraseFields();

    if (allPhraseFields.size() > 0) {
        // find non-field clauses
        List<Clause> normalClauses = new ArrayList<>(clauses.size());
        for (Clause clause : clauses) {
            if (clause.field != null || clause.isPhrase)
                continue;
            // check for keywords "AND,OR,TO"
            if (clause.isBareWord()) {
                String s = clause.val;
                // avoid putting explicit operators in the phrase query
                if ("OR".equals(s) || "AND".equals(s) || "NOT".equals(s) || "TO".equals(s))
                    continue;
            }
            normalClauses.add(clause);
        }

        // create a map of {wordGram, [phraseField]}
        Multimap<Integer, FieldParams> phraseFieldsByWordGram = Multimaps.index(allPhraseFields,
                FieldParams::getWordGrams);

        // for each {wordGram, [phraseField]} entry, create and add shingled field queries to the main user query
        for (Map.Entry<Integer, Collection<FieldParams>> phraseFieldsByWordGramEntry : phraseFieldsByWordGram
                .asMap().entrySet()) {

            // group the fields within this wordGram collection by their associated slop (it's possible that the same
            // field appears multiple times for the same wordGram count but with different slop values; in this case, we
            // should take the *sum* of those phrase queries, rather than the max across them)
            Multimap<Integer, FieldParams> phraseFieldsBySlop = Multimaps
                    .index(phraseFieldsByWordGramEntry.getValue(), FieldParams::getSlop);
            for (Map.Entry<Integer, Collection<FieldParams>> phraseFieldsBySlopEntry : phraseFieldsBySlop
                    .asMap().entrySet()) {
                addShingledPhraseQueries(query, normalClauses, phraseFieldsBySlopEntry.getValue(),
                        phraseFieldsByWordGramEntry.getKey(), config.tiebreaker,
                        phraseFieldsBySlopEntry.getKey());
            }
        }
    }
}

From source file: com.metamx.http.client.NettyHttpClient.java

@Override
public <Intermediate, Final> ListenableFuture<Final> go(final Request request,
        final HttpResponseHandler<Intermediate, Final> handler, final Duration requestReadTimeout) {
    final HttpMethod method = request.getMethod();
    final URL url = request.getUrl();
    final Multimap<String, String> headers = request.getHeaders();

    final String requestDesc = String.format("%s %s", method, url);
    if (log.isDebugEnabled()) {
        log.debug("[%s] starting", requestDesc);
    }

    // Block while acquiring a channel from the pool, then complete the request asynchronously.
    final Channel channel;
    final String hostKey = getPoolKey(url);
    final ResourceContainer<ChannelFuture> channelResourceContainer = pool.take(hostKey);
    final ChannelFuture channelFuture = channelResourceContainer.get().awaitUninterruptibly();
    if (!channelFuture.isSuccess()) {
        channelResourceContainer.returnResource(); // Some other poor sap will have to deal with it...
        return Futures.immediateFailedFuture(
                new ChannelException("Faulty channel in resource pool", channelFuture.getCause()));
    } else {
        channel = channelFuture.getChannel();
    }

    final String urlFile = Strings.nullToEmpty(url.getFile());
    final HttpRequest httpRequest = new DefaultHttpRequest(HttpVersion.HTTP_1_1, method,
            urlFile.isEmpty() ? "/" : urlFile);

    if (!headers.containsKey(HttpHeaders.Names.HOST)) {
        httpRequest.headers().add(HttpHeaders.Names.HOST, getHost(url));
    }

    httpRequest.headers().set(HttpHeaders.Names.ACCEPT_ENCODING, HttpHeaders.Values.GZIP);

    for (Map.Entry<String, Collection<String>> entry : headers.asMap().entrySet()) {
        String key = entry.getKey();

        for (String obj : entry.getValue()) {
            httpRequest.headers().add(key, obj);
        }
    }

    if (request.hasContent()) {
        httpRequest.setContent(request.getContent());
    }

    final long readTimeout = getReadTimeout(requestReadTimeout);
    final SettableFuture<Final> retVal = SettableFuture.create();

    if (readTimeout > 0) {
        channel.getPipeline().addLast(READ_TIMEOUT_HANDLER_NAME,
                new ReadTimeoutHandler(timer, readTimeout, TimeUnit.MILLISECONDS));
    }

    channel.getPipeline().addLast(LAST_HANDLER_NAME, new SimpleChannelUpstreamHandler() {
        private volatile ClientResponse<Intermediate> response = null;

        @Override
        public void messageReceived(ChannelHandlerContext ctx, MessageEvent e) throws Exception {
            if (log.isDebugEnabled()) {
                log.debug("[%s] messageReceived: %s", requestDesc, e.getMessage());
            }
            try {
                Object msg = e.getMessage();

                if (msg instanceof HttpResponse) {
                    HttpResponse httpResponse = (HttpResponse) msg;
                    if (log.isDebugEnabled()) {
                        log.debug("[%s] Got response: %s", requestDesc, httpResponse.getStatus());
                    }

                    response = handler.handleResponse(httpResponse);
                    if (response.isFinished()) {
                        retVal.set((Final) response.getObj());
                    }

                    if (!httpResponse.isChunked()) {
                        finishRequest();
                    }
                } else if (msg instanceof HttpChunk) {
                    HttpChunk httpChunk = (HttpChunk) msg;
                    if (log.isDebugEnabled()) {
                        log.debug("[%s] Got chunk: %sB, last=%s", requestDesc,
                                httpChunk.getContent().readableBytes(), httpChunk.isLast());
                    }

                    if (httpChunk.isLast()) {
                        finishRequest();
                    } else {
                        response = handler.handleChunk(response, httpChunk);
                        if (response.isFinished() && !retVal.isDone()) {
                            retVal.set((Final) response.getObj());
                        }
                    }
                } else {
                    throw new IllegalStateException(String.format("Unknown message type[%s]", msg.getClass()));
                }
            } catch (Exception ex) {
                log.warn(ex, "[%s] Exception thrown while processing message, closing channel.", requestDesc);

                if (!retVal.isDone()) {
                    retVal.set(null);
                }
                channel.close();
                channelResourceContainer.returnResource();

                throw ex;
            }
        }

        private void finishRequest() {
            ClientResponse<Final> finalResponse = handler.done(response);
            if (!finalResponse.isFinished()) {
                throw new IllegalStateException(
                        String.format("[%s] Didn't get a completed ClientResponse Object from [%s]",
                                requestDesc, handler.getClass()));
            }
            if (!retVal.isDone()) {
                retVal.set(finalResponse.getObj());
            }
            removeHandlers();
            channelResourceContainer.returnResource();
        }

        @Override
        public void exceptionCaught(ChannelHandlerContext context, ExceptionEvent event) throws Exception {
            if (log.isDebugEnabled()) {
                final Throwable cause = event.getCause();
                if (cause == null) {
                    log.debug("[%s] Caught exception", requestDesc);
                } else {
                    log.debug(cause, "[%s] Caught exception", requestDesc);
                }
            }

            retVal.setException(event.getCause());
            // response is non-null if we received initial chunk and then exception occurs
            if (response != null) {
                handler.exceptionCaught(response, event.getCause());
            }
            removeHandlers();
            try {
                channel.close();
            } catch (Exception e) {
                // ignore
            } finally {
                channelResourceContainer.returnResource();
            }

            context.sendUpstream(event);
        }

        @Override
        public void channelDisconnected(ChannelHandlerContext context, ChannelStateEvent event)
                throws Exception {
            if (log.isDebugEnabled()) {
                log.debug("[%s] Channel disconnected", requestDesc);
            }
            // response is non-null if we received initial chunk and then exception occurs
            if (response != null) {
                handler.exceptionCaught(response, new ChannelException("Channel disconnected"));
            }
            channel.close();
            channelResourceContainer.returnResource();
            if (!retVal.isDone()) {
                log.warn("[%s] Channel disconnected before response complete", requestDesc);
                retVal.setException(new ChannelException("Channel disconnected"));
            }
            context.sendUpstream(event);
        }

        private void removeHandlers() {
            if (readTimeout > 0) {
                channel.getPipeline().remove(READ_TIMEOUT_HANDLER_NAME);
            }
            channel.getPipeline().remove(LAST_HANDLER_NAME);
        }
    });

    channel.write(httpRequest).addListener(new ChannelFutureListener() {
        @Override
        public void operationComplete(ChannelFuture future) throws Exception {
            if (!future.isSuccess()) {
                channel.close();
                channelResourceContainer.returnResource();
                if (!retVal.isDone()) {
                    retVal.setException(new ChannelException(
                            String.format("[%s] Failed to write request to channel", requestDesc),
                            future.getCause()));
                }
            }
        }
    });

    return retVal;
}

From source file: eu.itesla_project.iidm.network.impl.NodeBreakerVoltageLevel.java

public void exportTopology(OutputStream os) throws IOException {
    Graph g = new Graph().id("\"" + NodeBreakerVoltageLevel.this.id + "\"");
    Map<Integer, Node> intToNode = new HashMap<>();
    Multimap<String, Integer> busToNodes = ArrayListMultimap.create();
    for (int n = 0; n < graph.getVertexCount(); n++) {
        Node node = new Node().id(Integer.toString(n));
        intToNode.put(n, node);
        Bus bus = getCalculatedBusBreakerTopology().getBus(n);
        if (bus != null) {
            busToNodes.put(bus.getId(), n);
        } else {
            TerminalExt terminal = graph.getVertexObject(n);
            if (terminal != null) {
                ConnectableImpl connectable = terminal.getConnectable();
                String label = n + "\\n" + connectable.getType().toString() + "\\n" + connectable.getId();
                node.attr("label", label);
                g.node(node);
            }
        }
    }
    String[] colors = generateColorScale(busToNodes.asMap().keySet().size());
    int i = 0;
    for (String key : busToNodes.asMap().keySet()) {
        Graph newBus = new Graph().id("\"" + key + "\"");
        newBus.attr("label", key);
        for (int nodeInt : busToNodes.get(key)) {
            Node node = intToNode.get(nodeInt);
            TerminalExt terminal = graph.getVertexObject(nodeInt);
            if (terminal != null) {
                ConnectableImpl connectable = terminal.getConnectable();
                String label = nodeInt + "\\n" + connectable.getType().toString() + "\\n" + connectable.getId();
                node.attr("label", label);
            }
            node.attr("style", "filled").attr("color", colors[i]);
            newBus.node(node);
        }
        g.subGraph(newBus);
        i++;
    }

    //        writer.append("graph \"").append(NodeBreakerVoltageLevel.this.id).append("\" {\n");
    //        for (int n = 0; n < graph.getVertexCount(); n++) {
    //            TerminalExt terminal = graph.getVertexObject(n);
    //            if (terminal != null) {
    //                ConnectableImpl connectable = terminal.getConnectable();
    //                String label = n + "\\n" + connectable.getType().toString() + "\\n" + connectable.getId();
    //                writer.append("  ").append(Integer.toString(n))
    //                        .append(" [label=\"").append(label).append("\"]\n");
    //            }
    //        }
    boolean drawSwitchId = true;
    for (int e = 0; e < graph.getEdgeCount(); e++) {
        Edge edge = new Edge(intToNode.get(graph.getEdgeVertex1(e)), intToNode.get(graph.getEdgeVertex2(e)))
                .id(Integer.toString(e));

        SwitchImpl _switch = graph.getEdgeObject(e);
        if (_switch != null) {
            if (drawSwitchId) {
                edge.attr("label", _switch.getKind().toString() + "\n" + _switch.getId()).attr("fontsize",
                        "10");
            }
            edge.attr("style", _switch.isOpen() ? "dotted" : "solid");
        }
        g.edge(edge);
    }
    g.writeTo(os);
    //        for (int e = 0; e < graph.getEdgeCount(); e++) {
    //            writer.append("  ").append(Integer.toString(graph.getEdgeVertex1(e)))
    //                    .append(" -- ").append(Integer.toString(graph.getEdgeVertex2(e)));
    //            SwitchImpl _switch = graph.getEdgeObject(e);
    //            if (_switch != null) {
    //                writer.append(" [");
    //                if (drawSwitchId) {
    //                    writer.append("label=\"").append(_switch.getId())
    //                            .append("\", fontsize=10");
    //                }
    //                writer.append("style=\"").append(_switch.isOpen() ? "dotted" : "solid").append("\"");
    //            }
    //            writer.append("]\n");
    //        }
    //        writer.append("}\n");
}

From source file: com.arpnetworking.metrics.mad.parsers.CollectdJsonToRecordParser.java

/**
 * Parses a collectd POST body.
 *
 * @param request an HTTP request
 * @return A list of {@link DefaultRecord.Builder}
 * @throws ParsingException if the body is not parsable as collectd formatted json data
 */
public List<Record> parse(final HttpRequest request) throws ParsingException {
    final Map<String, String> metricTags = Maps.newHashMap();
    for (final Map.Entry<String, String> header : request.getHeaders().entries()) {
        if (header.getKey().toLowerCase(Locale.ENGLISH).startsWith(TAG_PREFIX)) {
            metricTags.put(header.getKey().toLowerCase(Locale.ENGLISH).substring(TAG_PREFIX.length()),
                    header.getValue());
        }
    }
    try {
        final List<CollectdRecord> records = OBJECT_MAPPER.readValue(request.getBody(), COLLECTD_RECORD_LIST);
        final List<Record> parsedRecords = Lists.newArrayList();
        for (final CollectdRecord record : records) {
            final Multimap<String, Metric> metrics = HashMultimap.create();

            metricTags.put(Key.HOST_DIMENSION_KEY, record.getHost());
            final DefaultRecord.Builder builder = new DefaultRecord.Builder()
                    .setId(UUID.randomUUID().toString()).setTime(record.getTime())
                    .setAnnotations(ImmutableMap.copyOf(metricTags))
                    .setDimensions(ImmutableMap.copyOf(metricTags));

            final String plugin = record.getPlugin();
            final String pluginInstance = record.getPluginInstance();
            final String type = record.getType();
            final String typeInstance = record.getTypeInstance();

            for (final CollectdRecord.Sample sample : record.getSamples()) {
                if (sample.getValue() == null) {
                    continue;
                }
                final String metricName = computeMetricName(plugin, pluginInstance, type, typeInstance,
                        sample.getDsName());
                final MetricType metricType = mapDsType(sample.getDsType());
                final Metric metric = new DefaultMetric.Builder().setType(metricType)
                        .setValues(Collections
                                .singletonList(new Quantity.Builder().setValue(sample.getValue()).build()))
                        .build();
                metrics.put(metricName, metric);
            }
            final Map<String, Metric> collectedMetrics = metrics.asMap().entrySet().stream()
                    .collect(Collectors.toMap(Map.Entry::getKey, CollectdJsonToRecordParser::mergeMetrics));
            builder.setMetrics(ImmutableMap.copyOf(collectedMetrics));
            parsedRecords.add(builder.build());
        }
        return parsedRecords;
    } catch (final IOException | ConstraintsViolatedException ex) {
        throw new ParsingException("Error parsing collectd json", request.getBody(), ex);
    }
}

From source file: net.minecraftforge.registries.GameData.java

@SuppressWarnings({ "unchecked", "rawtypes" })
public static Multimap<ResourceLocation, ResourceLocation> injectSnapshot(
        Map<ResourceLocation, ForgeRegistry.Snapshot> snapshot, boolean injectFrozenData,
        boolean isLocalWorld) {
    FMLLog.log.info("Injecting existing registry data into this {} instance",
            FMLCommonHandler.instance().getEffectiveSide().isServer() ? "server" : "client");
    RegistryManager.ACTIVE.registries.forEach((name, reg) -> reg.validateContent(name));
    RegistryManager.ACTIVE.registries.forEach((name, reg) -> reg.dump(name));
    RegistryManager.ACTIVE.registries.forEach((name, reg) -> reg.resetDelegates());

    List<ResourceLocation> missingRegs = snapshot.keySet().stream()
            .filter(name -> !RegistryManager.ACTIVE.registries.containsKey(name)).collect(Collectors.toList());
    if (missingRegs.size() > 0) {
        String text = "Forge Mod Loader detected missing/unknown registrie(s).\n\n" + "There are "
                + missingRegs.size() + " missing registries in this save.\n"
                + "If you continue the missing registries will get removed.\n"
                + "This may cause issues, it is advised that you create a world backup before continuing.\n\n"
                + "Missing Registries:\n";

        for (ResourceLocation s : missingRegs)
            text += s.toString() + "\n";

        if (!StartupQuery.confirm(text))
            StartupQuery.abort();
    }

    RegistryManager STAGING = new RegistryManager("STAGING");

    final Map<ResourceLocation, Map<ResourceLocation, Integer[]>> remaps = Maps.newHashMap();
    final LinkedHashMap<ResourceLocation, Map<ResourceLocation, Integer>> missing = Maps.newLinkedHashMap();
    // Load the snapshot into the "STAGING" registry
    snapshot.forEach((key, value) -> {
        final Class<? extends IForgeRegistryEntry> clazz = RegistryManager.ACTIVE.getSuperType(key);
        remaps.put(key, Maps.newLinkedHashMap());
        missing.put(key, Maps.newHashMap());
        loadPersistentDataToStagingRegistry(RegistryManager.ACTIVE, STAGING, remaps.get(key), missing.get(key),
                key, value, clazz);
    });

    snapshot.forEach((key, value) -> {
        value.dummied.forEach(dummy -> {
            Map<ResourceLocation, Integer> m = missing.get(key);
            ForgeRegistry<?> reg = STAGING.getRegistry(key);

            // Currently missing locally, we just inject and carry on
            if (m.containsKey(dummy)) {
                if (reg.markDummy(dummy, m.get(dummy)))
                    m.remove(dummy);
            } else if (isLocalWorld) {
                if (ForgeRegistry.DEBUG)
                    FMLLog.log.debug("Registry {}: Resuscitating dummy entry {}", key, dummy);
            } else {
                // The server believes this is a dummy block identity, but we seem to have one locally. This is likely a conflict
                // in mod setup - Mark this entry as a dummy
                int id = reg.getID(dummy);
                FMLLog.log.warn(
                        "Registry {}: The ID {} is currently locally mapped - it will be replaced with a dummy for this session",
                        key, id);
                reg.markDummy(dummy, id);
            }
        });
    });

    int count = missing.values().stream().mapToInt(Map::size).sum();
    if (count > 0) {
        FMLLog.log.debug("There are {} mappings missing - attempting a mod remap", count);
        Multimap<ResourceLocation, ResourceLocation> defaulted = ArrayListMultimap.create();
        Multimap<ResourceLocation, ResourceLocation> failed = ArrayListMultimap.create();

        missing.entrySet().stream().filter(e -> e.getValue().size() > 0).forEach(m -> {
            ResourceLocation name = m.getKey();
            ForgeRegistry<?> reg = STAGING.getRegistry(name);
            RegistryEvent.MissingMappings<?> event = reg.getMissingEvent(name, m.getValue());
            MinecraftForge.EVENT_BUS.post(event);

            List<MissingMappings.Mapping<?>> lst = event.getAllMappings().stream()
                    .filter(e -> e.getAction() == MissingMappings.Action.DEFAULT).collect(Collectors.toList());
            if (!lst.isEmpty()) {
                FMLLog.log.error("Unidentified mapping from registry {}", name);
                lst.forEach(map -> {
                    FMLLog.log.error("    {}: {}", map.key, map.id);
                    if (!isLocalWorld)
                        defaulted.put(name, map.key);
                });
            }
            event.getAllMappings().stream().filter(e -> e.getAction() == MissingMappings.Action.FAIL)
                    .forEach(fail -> failed.put(name, fail.key));

            final Class<? extends IForgeRegistryEntry> clazz = RegistryManager.ACTIVE.getSuperType(name);
            processMissing(clazz, name, STAGING, event, m.getValue(), remaps.get(name), defaulted.get(name),
                    failed.get(name));
        });

        if (!defaulted.isEmpty() && !isLocalWorld)
            return defaulted;

        if (!defaulted.isEmpty()) {
            StringBuilder buf = new StringBuilder();
            buf.append("Forge Mod Loader detected missing registry entries.\n\n").append("There are ")
                    .append(defaulted.size()).append(" missing entries in this save.\n")
                    .append("If you continue the missing entries will get removed.\n")
                    .append("A world backup will be automatically created in your saves directory.\n\n");

            defaulted.asMap().forEach((name, entries) -> {
                buf.append("Missing ").append(name).append(":\n");
                entries.forEach(rl -> buf.append("    ").append(rl).append("\n"));
            });

            boolean confirmed = StartupQuery.confirm(buf.toString());
            if (!confirmed)
                StartupQuery.abort();

            try {
                String skip = System.getProperty("fml.doNotBackup");
                if (skip == null || !"true".equals(skip)) {
                    ZipperUtil.backupWorld();
                } else {
                    for (int x = 0; x < 10; x++)
                        FMLLog.log.error("!!!!!!!!!! UPDATING WORLD WITHOUT DOING BACKUP !!!!!!!!!!!!!!!!");
                }
            } catch (IOException e) {
                StartupQuery.notify("The world backup couldn't be created.\n\n" + e);
                StartupQuery.abort();
            }
        }

        if (!defaulted.isEmpty()) {
            if (isLocalWorld)
                FMLLog.log.error(
                        "There are unidentified mappings in this world - we are going to attempt to process anyway");
        }

    }

    if (injectFrozenData) {
        // If we're loading from disk, we can actually substitute air in the block map for anything that is otherwise "missing". This keeps the reference in the map, in case
        // the block comes back later
        missing.forEach((name, m) -> {
            ForgeRegistry<?> reg = STAGING.getRegistry(name);
            m.forEach((rl, id) -> reg.markDummy(rl, id));
        });

        // If we're loading up the world from disk, we want to add in the new data that might have been provisioned by mods
        // So we load it from the frozen persistent registry
        RegistryManager.ACTIVE.registries.forEach((name, reg) -> {
            final Class<? extends IForgeRegistryEntry> clazz = RegistryManager.ACTIVE.getSuperType(name);
            loadFrozenDataToStagingRegistry(STAGING, name, remaps.get(name), clazz);
        });
    }

    // Validate that all the STAGING data is good
    STAGING.registries.forEach((name, reg) -> reg.validateContent(name));

    // Load the STAGING registry into the ACTIVE registry
    for (Map.Entry<ResourceLocation, ForgeRegistry<? extends IForgeRegistryEntry<?>>> r : RegistryManager.ACTIVE.registries
            .entrySet()) {
        final Class<? extends IForgeRegistryEntry> registrySuperType = RegistryManager.ACTIVE
                .getSuperType(r.getKey());
        loadRegistry(r.getKey(), STAGING, RegistryManager.ACTIVE, registrySuperType, true);
    }

    // Dump the active registry
    RegistryManager.ACTIVE.registries.forEach((name, reg) -> reg.dump(name));

    // Tell mods that the ids have changed
    Loader.instance().fireRemapEvent(remaps, false);

    // The id map changed, ensure we apply object holders
    ObjectHolderRegistry.INSTANCE.applyObjectHolders();

    // Return an empty list, because we're good
    return ArrayListMultimap.create();
}

From source file: de.hzi.helmholtz.Compare.PathwayComparisonUsingModules.java

public Multimap<Double, String> SubsetsMatching(final PathwayUsingModules firstPathway,
        final PathwayUsingModules secondPathway, BiMap<String, Integer> newSourceGeneIdToPositionMap,
        BiMap<String, Integer> newTargetGeneIdToPositionMap, int Yes) {
    Multimap<Double, String> resultPerfect = TreeMultimap.create(Ordering.natural().reverse(),
            Ordering.natural());
    PathwayUsingModules firstPathwayCopy = new PathwayUsingModules(firstPathway);// Copy of the Query pathway
    PathwayUsingModules secondPathwayCopy = new PathwayUsingModules(secondPathway);// Copy of the Target pathway
    // PathwayUsingModules secondPathwayCopy1 = new PathwayUsingModules(secondPathway);
    int currentQueryGene = 0;
    Iterator<Module> sourceGeneIt = firstPathway.geneIterator();
    List<String> QueryToRemove = new ArrayList<String>();
    List<String> TargetToRemove = new ArrayList<String>();
    while (sourceGeneIt.hasNext()) {
        currentQueryGene++;
        Module queryGene = sourceGeneIt.next();

        int currentTargetGene = 0;
        Multiset<String> qfunction = LinkedHashMultiset.create();
        List<String> qfunctionList = new ArrayList<String>();
        List<String> qactivity = new ArrayList<String>();
        List<Set<String>> qsubstrate = new ArrayList<Set<String>>();
        for (Domain d : queryGene.getDomains()) {
            qfunction.add(d.getDomainFunctionString());
            qfunctionList.add(d.getDomainFunctionString());
            qactivity.add(d.getStatus().toString());
            qsubstrate.add(d.getSubstrates());
        }
        Iterator<Module> targetGeneIt = secondPathway.geneIterator();

        while (targetGeneIt.hasNext()) {
            currentTargetGene++;
            Module targetGene = targetGeneIt.next();
            Multiset<String> tfunction = LinkedHashMultiset.create();
            List<String> tfunctionList = new ArrayList<String>();
            List<String> tactivity = new ArrayList<String>();
            List<Set<String>> tsubstrate = new ArrayList<Set<String>>();
            for (Domain d : targetGene.getDomains()) {
                tfunctionList.add(d.getDomainFunctionString());
                tfunction.add(d.getDomainFunctionString());
                tactivity.add(d.getStatus().toString());
                tsubstrate.add(d.getSubstrates());
            }
            Multiset<String> DomainsCovered = Multisets.intersection(qfunction, tfunction);
            if (DomainsCovered.size() == qfunction.size() && DomainsCovered.size() == tfunction.size()) {
                Multimap<Double, Multimap<String, Integer>> activityscores = myFunction.calculate(qactivity,
                        tactivity);
                Multimap<String, Integer> Functionscores = ArrayListMultimap.create();

                int TranspositionDomains = LevenshteinDistance.computeLevenshteinDistance(qfunctionList,
                        tfunctionList);
                if (TranspositionDomains > 0) {
                    TranspositionDomains = 1;
                }

                Functionscores.put(qfunction.size() + "-0", TranspositionDomains);
                Multimap<Double, Multimap<String, Integer>> substratescore = myFunction
                        .calculate(getSubstrateList(qsubstrate), getSubstrateList(tsubstrate));
                Object activityScore = activityscores.asMap().keySet().toArray()[0];
                Object substrateScore = substratescore.asMap().keySet().toArray()[0];
                double finalScore = Math
                        .round((((2.9 * 1.0) + (0.05 * Double.parseDouble(activityScore.toString().trim()))
                                + (0.05 * Double.parseDouble(substrateScore.toString().trim()))) / 3) * 100.0)
                        / 100.0;
                String ConvertedGeneIDs = "";
                if (Yes == 0) {
                    ConvertedGeneIDs = reconstructWithGeneId(Integer.toString(currentQueryGene),
                            newSourceGeneIdToPositionMap) + "->"
                            + reconstructWithGeneId(Integer.toString(currentTargetGene),
                                    newTargetGeneIdToPositionMap);
                } else {
                    ConvertedGeneIDs = reconstructWithGeneId(Integer.toString(currentTargetGene),
                            newTargetGeneIdToPositionMap) + "->"
                            + reconstructWithGeneId(Integer.toString(currentQueryGene),
                                    newSourceGeneIdToPositionMap);
                }
                resultPerfect.put(finalScore, ConvertedGeneIDs);
                ScoreFunctionMatchMisMatch.put(ConvertedGeneIDs, Functionscores);
                ScoreStatusMatchMisMatch.putAll(ConvertedGeneIDs, activityscores.values());
                ScoreSubstrateMatchMisMatch.putAll(ConvertedGeneIDs, substratescore.values());

                TargetToRemove.add(reconstructWithGeneId(Integer.toString(currentTargetGene),
                        newTargetGeneIdToPositionMap));
                QueryToRemove.add(reconstructWithGeneId(Integer.toString(currentQueryGene),
                        newSourceGeneIdToPositionMap));
            }
        }

    }
    for (String i : TargetToRemove) {
        secondPathwayCopy.removeModule(i);
    }
    for (String i : QueryToRemove) {
        firstPathwayCopy.removeModule(i);
    }
    if (firstPathwayCopy.size() > 0 && secondPathwayCopy.size() > 0) {
        // Re-construct the bimaps
        newSourceGeneIdToPositionMap = HashBiMap.create();
        int temp = 0;
        for (Module e : firstPathwayCopy.getModules()) {
            temp = temp + 1;
            newSourceGeneIdToPositionMap.put(e.getModuleId(), temp);
        }
        newTargetGeneIdToPositionMap = HashBiMap.create();
        temp = 0;
        for (Module e : secondPathwayCopy.getModules()) {
            temp = temp + 1;
            newTargetGeneIdToPositionMap.put(e.getModuleId(), temp);
        }
        resultPerfect.putAll(SubsetIdentification(firstPathwayCopy, secondPathwayCopy,
                newSourceGeneIdToPositionMap, newTargetGeneIdToPositionMap, Yes));
    }
    ////System.out.println(resultPerfect);
    return resultPerfect;
}