Example usage for com.google.common.collect Sets newHashSetWithExpectedSize

Introduction

This page collects example usages of com.google.common.collect.Sets#newHashSetWithExpectedSize from open-source projects.

Prototype

public static <E> HashSet<E> newHashSetWithExpectedSize(int expectedSize) 

Document

Creates a HashSet instance, with a high enough initial table size that it should hold expectedSize elements without resizing.
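
Before the project examples below, here is a minimal self-contained sketch of the typical pattern: when the number of elements is known up front, passing it as expectedSize lets the returned HashSet allocate a sufficiently large table immediately instead of growing as elements are added. The class and helper names in this sketch are illustrative, not taken from any of the projects below.

import com.google.common.collect.Sets;

import java.util.Arrays;
import java.util.List;
import java.util.Set;

public class NewHashSetWithExpectedSizeSketch {

    /**
     * Collects the distinct lowercase forms of the given words.
     * Sizing the set to words.size() avoids rehashing even if every word is distinct.
     */
    static Set<String> distinctLowercase(List<String> words) {
        Set<String> result = Sets.newHashSetWithExpectedSize(words.size());
        for (String word : words) {
            result.add(word.toLowerCase());
        }
        return result;
    }

    public static void main(String[] args) {
        // Prints [foo, bar] (HashSet iteration order is unspecified)
        System.out.println(distinctLowercase(Arrays.asList("Foo", "bar", "FOO")));
    }
}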

Usage

From source file:org.sosy_lab.cpachecker.cpa.automaton.AutomatonTransferRelation.java

/**
 * Returns the <code>AutomatonStates</code> that follow this State in the ControlAutomatonCPA.
 * If the passed <code>AutomatonExpressionArguments</code> are not sufficient to determine the following state,
 * this method returns an <code>AutomatonUnknownState</code> that contains this state as its previous state.
 * The strengthen method of the <code>AutomatonUnknownState</code> should be used once enough information is available to determine the correct following state.
 *
 * If the state is a NonDet state, multiple following states may be returned.
 * If the only following state is BOTTOM, an empty set is returned.
 * @throws CPATransferException
 */
private Collection<AutomatonState> getFollowStates(AutomatonState state, List<AbstractState> otherElements,
        CFAEdge edge, boolean failOnUnknownMatch) throws CPATransferException {
    Preconditions.checkArgument(!(state instanceof AutomatonUnknownState));
    if (state == cpa.getBottomState()) {
        return Collections.emptySet();
    }

    if (collectTokenInformation) {
        SourceLocationMapper.getKnownToEdge(edge);
    }

    if (state.getInternalState().getTransitions().isEmpty()) {
        // shortcut
        return Collections.singleton(state);
    }

    Collection<AutomatonState> lSuccessors = Sets.newHashSetWithExpectedSize(2);
    AutomatonExpressionArguments exprArgs = new AutomatonExpressionArguments(state, state.getVars(),
            otherElements, edge, logger);
    boolean edgeMatched = false;
    int failedMatches = 0;
    boolean nonDetState = state.getInternalState().isNonDetState();

    // these transitions cannot be evaluated until last, because they might have side effects on other CPAs (we don't want to execute them twice)
    // the transitionVariables have to be cached (produced during the match operation)
    // the list holds a Transition and the TransitionVariables generated during its match
    List<Pair<AutomatonTransition, Map<Integer, String>>> transitionsToBeTaken = new ArrayList<>(2);

    for (AutomatonTransition t : state.getInternalState().getTransitions()) {
        exprArgs.clearTransitionVariables();

        matchTime.start();
        ResultValue<Boolean> match = t.match(exprArgs);
        matchTime.stop();

        if (match.canNotEvaluate()) {
            if (failOnUnknownMatch) {
                throw new CPATransferException(
                        "Automaton transition condition could not be evaluated: " + match.getFailureMessage());
            }
            // if one transition cannot be evaluated the evaluation must be postponed until enough information is available
            return Collections.<AutomatonState>singleton(new AutomatonUnknownState(state));
        } else {
            if (match.getValue()) {
                edgeMatched = true;
                assertionsTime.start();
                ResultValue<Boolean> assertionsHold = t.assertionsHold(exprArgs);
                assertionsTime.stop();

                if (assertionsHold.canNotEvaluate()) {
                    if (failOnUnknownMatch) {
                        throw new CPATransferException(
                                "Automaton transition assertions could not be evaluated: "
                                        + assertionsHold.getFailureMessage());
                    }
                    // cannot yet be evaluated
                    return Collections.<AutomatonState>singleton(new AutomatonUnknownState(state));

                } else if (assertionsHold.getValue()) {
                    if (!t.canExecuteActionsOn(exprArgs)) {
                        if (failOnUnknownMatch) {
                            throw new CPATransferException("Automaton transition action could not be executed");
                        }
                        // cannot yet execute, goto UnknownState
                        return Collections.<AutomatonState>singleton(new AutomatonUnknownState(state));
                    }

                    // delay execution as described above
                    Map<Integer, String> transitionVariables = ImmutableMap
                            .copyOf(exprArgs.getTransitionVariables());
                    transitionsToBeTaken.add(Pair.of(t, transitionVariables));

                } else {
                    // matching transitions, but unfulfilled assertions: goto error state
                    AutomatonState errorState = AutomatonState.automatonStateFactory(
                            Collections.<String, AutomatonVariable>emptyMap(), AutomatonInternalState.ERROR,
                            cpa, 0, 0, "");
                    logger.log(Level.INFO,
                            "Automaton going to ErrorState on edge \"" + edge.getDescription() + "\"");
                    lSuccessors.add(errorState);
                }

                if (!nonDetState) {
                    // not a nondet State, break on the first matching edge
                    break;
                }
            } else {
                // do nothing if the edge did not match
                failedMatches++;
            }
        }
    }

    if (edgeMatched) {
        // execute Transitions
        for (Pair<AutomatonTransition, Map<Integer, String>> pair : transitionsToBeTaken) {
            // this transition will be taken. copy the variables
            AutomatonTransition t = pair.getFirst();
            Map<Integer, String> transitionVariables = pair.getSecond();
            actionTime.start();
            Map<String, AutomatonVariable> newVars = deepCloneVars(state.getVars());
            exprArgs.setAutomatonVariables(newVars);
            exprArgs.putTransitionVariables(transitionVariables);
            t.executeActions(exprArgs);
            actionTime.stop();
            String violatedPropertyDescription = null;
            if (t.getFollowState().isTarget()) {
                violatedPropertyDescription = t.getViolatedPropertyDescription(exprArgs);
            }
            AutomatonState lSuccessor = AutomatonState.automatonStateFactory(newVars, t.getFollowState(), cpa,
                    t.getAssumptions(), state.getMatches() + 1, state.getFailedMatches(),
                    violatedPropertyDescription);
            if (!(lSuccessor instanceof AutomatonState.BOTTOM)) {
                lSuccessors.add(lSuccessor);
            } else {
                // add nothing
            }
        }
        return lSuccessors;
    } else {
        // stay in same state, no transitions to be executed here (no transition matched)
        AutomatonState stateNewCounters = AutomatonState.automatonStateFactory(state.getVars(),
                state.getInternalState(), cpa, state.getMatches(), state.getFailedMatches() + failedMatches,
                null);
        if (collectTokenInformation) {
            stateNewCounters.addNoMatchTokens(state.getTokensSinceLastMatch());
            if (edge.getEdgeType() != CFAEdgeType.DeclarationEdge) {
                stateNewCounters
                        .addNoMatchTokens(SourceLocationMapper.getAbsoluteTokensFromCFAEdge(edge, true));
            }
        }
        return Collections.singleton(stateNewCounters);
    }
}

From source file:org.gradoop.util.AsciiGraphLoader.java

/**
 * Returns vertices by their given variables.
 *
 * @param variables variables used in GDL script
 * @return vertices
 */
public Collection<V> getVerticesByVariables(String... variables) {
    Collection<V> result = Sets.newHashSetWithExpectedSize(variables.length);
    for (String variable : variables) {
        V vertex = getVertexByVariable(variable);
        if (vertex != null) {
            result.add(vertex);
        }
    }
    return result;
}

From source file:edu.buaa.satla.analysis.core.automaton.AutomatonTransferRelation.java

/**
 * Returns the <code>AutomatonStates</code> that follow this State in the ControlAutomatonCPA.
 * If the passed <code>AutomatonExpressionArguments</code> are not sufficient to determine the following state,
 * this method returns an <code>AutomatonUnknownState</code> that contains this state as its previous state.
 * The strengthen method of the <code>AutomatonUnknownState</code> should be used once enough information is available to determine the correct following state.
 *
 * If the state is a NonDet state, multiple following states may be returned.
 * If the only following state is BOTTOM, an empty set is returned.
 * @throws CPATransferException
 */
private Collection<AutomatonState> getFollowStates(AutomatonState state, List<AbstractState> otherElements,
        CFAEdge edge, boolean failOnUnknownMatch) throws CPATransferException {
    Preconditions.checkArgument(!(state instanceof AutomatonUnknownState));
    if (state == cpa.getBottomState()) {
        return Collections.emptySet();
    }

    if (collectTokenInformation) {
        SourceLocationMapper.getKnownToEdge(edge);
    }

    if (state.getInternalState().getTransitions().isEmpty()) {
        // shortcut
        return Collections.singleton(state);
    }

    Collection<AutomatonState> lSuccessors = Sets.newHashSetWithExpectedSize(2);
    AutomatonExpressionArguments exprArgs = new AutomatonExpressionArguments(state, state.getVars(),
            otherElements, edge, logger);
    boolean edgeMatched = false;
    int failedMatches = 0;
    boolean nonDetState = state.getInternalState().isNonDetState();

    // these transitions cannot be evaluated until last, because they might have side effects on other CPAs (we don't want to execute them twice)
    // the transitionVariables have to be cached (produced during the match operation)
    // the list holds a Transition and the TransitionVariables generated during its match
    List<Pair<AutomatonTransition, Map<Integer, String>>> transitionsToBeTaken = new ArrayList<>(2);

    for (AutomatonTransition t : state.getInternalState().getTransitions()) {
        exprArgs.clearTransitionVariables();

        matchTime.start();
        ResultValue<Boolean> match = t.match(exprArgs);
        matchTime.stop();

        //      System.out.println("----------------------");
        //      System.out.println(t.getTrigger());
        //      System.out.println(t.getFollowState().getName());
        //      System.out.println(edge.getPredecessor().getNodeNumber());
        //      System.out.println(edge.getCode());
        //      System.out.println(match.getValue());

        if (match.canNotEvaluate()) {
            if (failOnUnknownMatch) {
                throw new CPATransferException(
                        "Automaton transition condition could not be evaluated: " + match.getFailureMessage());
            }
            // if one transition cannot be evaluated the evaluation must be postponed until enough information is available
            return Collections.<AutomatonState>singleton(new AutomatonUnknownState(state));
        } else {
            if (match.getValue()) {
                edgeMatched = true;
                assertionsTime.start();
                ResultValue<Boolean> assertionsHold = t.assertionsHold(exprArgs);
                assertionsTime.stop();

                if (assertionsHold.canNotEvaluate()) {
                    if (failOnUnknownMatch) {
                        throw new CPATransferException(
                                "Automaton transition assertions could not be evaluated: "
                                        + assertionsHold.getFailureMessage());
                    }
                    // cannot yet be evaluated
                    return Collections.<AutomatonState>singleton(new AutomatonUnknownState(state));

                } else if (assertionsHold.getValue()) {
                    if (!t.canExecuteActionsOn(exprArgs)) {
                        if (failOnUnknownMatch) {
                            throw new CPATransferException("Automaton transition action could not be executed");
                        }
                        // cannot yet execute, goto UnknownState
                        return Collections.<AutomatonState>singleton(new AutomatonUnknownState(state));
                    }

                    // delay execution as described above
                    Map<Integer, String> transitionVariables = ImmutableMap
                            .copyOf(exprArgs.getTransitionVariables());
                    transitionsToBeTaken.add(Pair.of(t, transitionVariables));

                } else {
                    // matching transitions, but unfulfilled assertions: goto error state
                    AutomatonState errorState = AutomatonState.automatonStateFactory(
                            Collections.<String, AutomatonVariable>emptyMap(), AutomatonInternalState.ERROR,
                            cpa, 0, 0, "");
                    logger.log(Level.INFO,
                            "Automaton going to ErrorState on edge \"" + edge.getDescription() + "\"");
                    lSuccessors.add(errorState);
                }

                if (!nonDetState) {
                    // not a nondet State, break on the first matching edge
                    break;
                }
            } else {
                // do nothing if the edge did not match
                failedMatches++;
            }
        }
    }

    if (edgeMatched) {
        // execute Transitions
        for (Pair<AutomatonTransition, Map<Integer, String>> pair : transitionsToBeTaken) {
            // this transition will be taken. copy the variables
            AutomatonTransition t = pair.getFirst();
            Map<Integer, String> transitionVariables = pair.getSecond();
            actionTime.start();
            Map<String, AutomatonVariable> newVars = deepCloneVars(state.getVars());
            exprArgs.setAutomatonVariables(newVars);
            exprArgs.putTransitionVariables(transitionVariables);
            t.executeActions(exprArgs);
            actionTime.stop();
            String violatedPropertyDescription = null;
            if (t.getFollowState().isTarget()) {
                violatedPropertyDescription = t.getViolatedPropertyDescription(exprArgs);
            }
            AutomatonState lSuccessor = AutomatonState.automatonStateFactory(newVars, t.getFollowState(), cpa,
                    t.getAssumptions(), state.getMatches() + 1, state.getFailedMatches(),
                    violatedPropertyDescription);
            if (!(lSuccessor instanceof AutomatonState.BOTTOM)) {
                lSuccessors.add(lSuccessor);
            } else {
                // add nothing
            }
        }
        return lSuccessors;
    } else {
        // stay in same state, no transitions to be executed here (no transition matched)
        AutomatonState stateNewCounters = AutomatonState.automatonStateFactory(state.getVars(),
                state.getInternalState(), cpa, state.getMatches(), state.getFailedMatches() + failedMatches,
                null);
        if (collectTokenInformation) {
            stateNewCounters.addNoMatchTokens(state.getTokensSinceLastMatch());
            if (edge.getEdgeType() != CFAEdgeType.DeclarationEdge) {
                stateNewCounters
                        .addNoMatchTokens(SourceLocationMapper.getAbsoluteTokensFromCFAEdge(edge, true));
            }
        }
        return Collections.singleton(stateNewCounters);
    }
}

From source file:org.atlasapi.remotesite.bbc.BbcProgrammeGraphExtractor.java

private Set<Encoding> encodingsFrom(IonVersion ionVersion, String pid) {
    List<IonOndemandChange> ondemands = ionVersion.getOndemands();
    Set<Encoding> encodings = Sets.newHashSetWithExpectedSize(ondemands.size());
    for (IonOndemandChange ondemand : ondemands) {

        Maybe<Encoding> encoding = encodingCreator.createEncoding(ondemand, pid);
        if (encoding.hasValue()) {
            encodings.add(encoding.requireValue());
        }
    }
    return encodings;
}

From source file:com.android.tools.idea.rendering.AppResourceRepository.java

/**
 * Looks up the library dependencies from the Gradle tools model and returns the corresponding {@code .aar}
 * resource directories.
 */
@NotNull
private static List<File> findAarLibrariesFromGradle(List<AndroidFacet> dependentFacets,
        List<AndroidLibrary> libraries) {
    // Pull out the unique directories, in case multiple modules point to the same .aar folder
    Set<File> files = Sets.newHashSetWithExpectedSize(dependentFacets.size());

    Set<String> moduleNames = Sets.newHashSet();
    for (AndroidFacet f : dependentFacets) {
        moduleNames.add(f.getModule().getName());
    }
    for (AndroidLibrary library : libraries) {
        // We should only add .aar dependencies if they aren't already provided as modules.
        // For now, the way we associate them with each other is via the library name;
        // in the future the model will provide this for us
        String libraryName = null;
        String projectName = library.getProject();
        if (projectName != null && !projectName.isEmpty()) {
            libraryName = projectName.substring(projectName.lastIndexOf(':') + 1);
            // Since this library has project!=null, it exists in module form; don't
            // add it here.
            moduleNames.add(libraryName);
            continue;
        } else {
            File folder = library.getFolder();
            String name = folder.getName();
            if (name.endsWith(DOT_AAR)) {
                libraryName = name.substring(0, name.length() - DOT_AAR.length());
            } else if (folder.getPath().contains(EXPLODED_AAR)) {
                libraryName = folder.getParentFile().getName();
            }
        }
        if (libraryName != null && !moduleNames.contains(libraryName)) {
            File resFolder = library.getResFolder();
            if (resFolder.exists()) {
                files.add(resFolder);

                // Don't add it again!
                moduleNames.add(libraryName);
            }
        }
    }

    List<File> dirs = Lists.newArrayList();
    for (File resFolder : files) {
        dirs.add(resFolder);
    }

    // Sort alphabetically to ensure that we keep a consistent order of these libraries;
    // otherwise when we jump from libraries initialized from IntelliJ library binary paths
    // to gradle project state, the order difference will cause the merged project resource
    // maps to have to be recomputed
    Collections.sort(dirs);
    return dirs;
}

From source file:com.twitter.graphjet.bipartite.edgepool.EdgePoolConcurrentTestHelper.java

/**
 * This helper method sets up a concurrent read-write situation with a single writer and multiple
 * readers that access the same underlying edgePool, and tests for correct edge access during
 * simultaneous edge writes. This helps test read consistency during arbitrary points of
 * inserting edges. Note that the exact read-write sequence here is non-deterministic and would
 * vary depending on the machine, but the hope is that given the large number of readers the reads
 * would be done at many different points of edge insertion. The test itself checks only for
 * partial correctness (it could have false positives) so this should only be used as a supplement
 * to other testing.
 *
 * @param edgePool           is the underlying
 *                           {@link com.twitter.graphjet.bipartite.edgepool.EdgePool}
 * @param numReadersPerNode  is the number of reader threads to use per node
 * @param leftSize           is the number of left nodes
 * @param rightSize          is the number of right nodes
 * @param edgeProbability    is the probability of an edge between a left-right node pair
 * @param random             is the random number generator to use for generating a random graph
 */
public static void testRandomConcurrentReadWriteThreads(EdgePool edgePool, int numReadersPerNode, int leftSize,
        int rightSize, double edgeProbability, Random random) {
    int maxWaitingTimeForThreads = 20; // in milliseconds
    int numReaders = leftSize * numReadersPerNode;
    CountDownLatch readersDoneLatch = new CountDownLatch(numReaders);
    // First, construct a random set of edges to insert in the graph
    Set<Pair<Integer, Integer>> edges = Sets
            .newHashSetWithExpectedSize((int) (leftSize * rightSize * edgeProbability));
    List<EdgePoolReader> readers = Lists.newArrayListWithCapacity(numReaders);
    Int2ObjectMap<IntSet> leftSideGraph = new Int2ObjectOpenHashMap<IntSet>(leftSize);
    int averageLeftDegree = (int) (rightSize * edgeProbability);
    for (int i = 0; i < leftSize; i++) {
        IntSet nodeEdges = new IntOpenHashSet(averageLeftDegree);
        for (int j = 0; j < rightSize; j++) {
            if (random.nextDouble() < edgeProbability) {
                nodeEdges.add(j);
                edges.add(Pair.of(i, j));
            }
        }
        leftSideGraph.put(i, nodeEdges);
    }

    // Create a bunch of leftReaders per node that'll read from the graph at random
    for (int i = 0; i < leftSize; i++) {
        for (int j = 0; j < numReadersPerNode; j++) {
            readers.add(new EdgePoolReader(edgePool, new CountDownLatch(0), readersDoneLatch, i,
                    random.nextInt(maxWaitingTimeForThreads)));
        }
    }

    // Create a single writer that will insert these edges in random order
    List<WriterInfo> writerInfo = Lists.newArrayListWithCapacity(edges.size());
    List<Pair<Integer, Integer>> edgesList = Lists.newArrayList(edges);
    Collections.shuffle(edgesList);
    CountDownLatch writerDoneLatch = new CountDownLatch(edgesList.size());
    for (Pair<Integer, Integer> edge : edgesList) {
        writerInfo.add(new WriterInfo(edge.getLeft(), edge.getRight(), new CountDownLatch(0), writerDoneLatch));
    }

    ExecutorService executor = Executors.newFixedThreadPool(numReaders + 1); // single writer
    List<Callable<Integer>> allThreads = Lists.newArrayListWithCapacity(numReaders + 1);
    // First, we add the writer
    allThreads.add(Executors.callable(new EdgePoolWriter(edgePool, writerInfo), 1));
    // then the readers
    for (int i = 0; i < numReaders; i++) {
        allThreads.add(Executors.callable(readers.get(i), 1));
    }
    // these will execute in some non-deterministic order
    Collections.shuffle(allThreads, random);

    // Wait for all the processes to finish
    try {
        List<Future<Integer>> results = executor.invokeAll(allThreads, 10, TimeUnit.SECONDS);
        for (Future<Integer> result : results) {
            assertTrue(result.isDone());
            assertEquals(1, result.get().intValue());
        }
    } catch (InterruptedException e) {
        throw new RuntimeException("Execution for a thread was interrupted: ", e);
    } catch (ExecutionException e) {
        throw new RuntimeException("Execution issue in an executor thread: ", e);
    }

    // confirm that these worked as expected
    try {
        readersDoneLatch.await();
        writerDoneLatch.await();
    } catch (InterruptedException e) {
        throw new RuntimeException("Execution for last reader was interrupted: ", e);
    }

    // Check that all readers' read info is consistent with the graph
    for (EdgePoolReader reader : readers) {
        IntSet expectedEdges = leftSideGraph.get(reader.queryNode);
        assertTrue(reader.getQueryNodeDegree() <= expectedEdges.size());
        if (reader.getQueryNodeDegree() == 0) {
            assertNull(reader.getQueryNodeEdges());
        } else {
            for (int edge : reader.getQueryNodeEdges()) {
                assertTrue(expectedEdges.contains(edge));
            }
        }
    }
}

From source file:org.sosy_lab.cpachecker.pcc.strategy.parallel.interleaved.PartialReachedSetIOCheckingOnlyInterleavedCMCStrategy.java

@Override
protected void writeProofToStream(ObjectOutputStream pOut, UnmodifiableReachedSet pReached)
        throws IOException, InvalidConfigurationException, InterruptedException {
    if (!(pReached instanceof HistoryForwardingReachedSet)) {
        throw new InvalidConfigurationException(
                "Reached sets used by restart algorithm are not memorized. Please enable option analysis.memorizeReachedAfterRestart");
    }

    List<ReachedSet> partialReachedSets = ((HistoryForwardingReachedSet) pReached)
            .getAllReachedSetsUsedAsDelegates();

    if (partialReachedSets == null || partialReachedSets.isEmpty()) {
        logger.log(Level.SEVERE, "No proof parts available. Proof cannot be generated.");
        return;
    }

    List<ConfigurableProgramAnalysis> cpas = ((HistoryForwardingReachedSet) pReached).getCPAs();

    if (partialReachedSets.size() != cpas.size()) {
        logger.log(Level.SEVERE, "Analysis inconsistent. Proof cannot be generated.");
        return;
    }

    logger.log(Level.FINEST, "Write number of proof parts to proof");
    pOut.writeInt(partialReachedSets.size());

    CMCPartitioningIOHelper ioHelper;
    Set<ARGState> unexplored;
    try {
        ReachedSet reached;
        for (int i = 0; i < partialReachedSets.size(); i++) {
            GlobalInfo.getInstance().setUpInfoFromCPA(cpas.get(i));
            reached = partialReachedSets.get(i);

            unexplored = Sets.newHashSetWithExpectedSize(reached.getWaitlist().size());
            for (AbstractState toExplore : reached.getWaitlist()) {
                unexplored.add((ARGState) toExplore);
            }

            ioHelper = new CMCPartitioningIOHelper(config, logger, shutdown,
                    automatonWriter.getAllAncestorsFor(unexplored), unexplored,
                    (ARGState) reached.getFirstState());
            ioHelper.writeProof(pOut, reached);
        }
    } catch (ClassCastException e) {
        logger.log(Level.SEVERE, "Stop writing proof. Not all analysis use ARG CPA as top level CPA");
    }
}

From source file:org.sonatype.nexus.apachehttpclient.Hc4ProviderBase.java

protected void configureProxy(final DefaultHttpClient httpClient,
        final RemoteProxySettings remoteProxySettings) {
    if (remoteProxySettings.isEnabled()) {
        getLogger().debug("proxy setup with host '{}'", remoteProxySettings.getHostname());

        final HttpHost proxy = new HttpHost(remoteProxySettings.getHostname(), remoteProxySettings.getPort());
        httpClient.getParams().setParameter(ConnRoutePNames.DEFAULT_PROXY, proxy);

        // check if we have non-proxy hosts
        if (remoteProxySettings.getNonProxyHosts() != null
                && !remoteProxySettings.getNonProxyHosts().isEmpty()) {
            final Set<Pattern> nonProxyHostPatterns = Sets
                    .newHashSetWithExpectedSize(remoteProxySettings.getNonProxyHosts().size());
            for (String nonProxyHostRegex : remoteProxySettings.getNonProxyHosts()) {
                try {
                    nonProxyHostPatterns.add(Pattern.compile(nonProxyHostRegex, Pattern.CASE_INSENSITIVE));
                } catch (PatternSyntaxException e) {
                    getLogger().warn("Invalid non proxy host regex: {}", nonProxyHostRegex, e);
                }
            }
            httpClient.setRoutePlanner(new NonProxyHostsAwareHttpRoutePlanner(
                    httpClient.getConnectionManager().getSchemeRegistry(), nonProxyHostPatterns));
        }

        configureAuthentication(httpClient, remoteProxySettings.getProxyAuthentication(), proxy);
    }
}

From source file:com.b2international.snowowl.datastore.server.DelegateCDOServerChangeManager.java

/**
 * Provides a way to handle transactions after they have been committed to the lightweight store.
 */
public void handleTransactionAfterCommitted() {
    RuntimeException caughtException = null;
    final Collection<ICDOChangeProcessor> committedChangeProcessors = newConcurrentHashSet();
    final Collection<IndexCommitChangeSet> indexCommitChangeSets = newConcurrentHashSet();

    try {
        final Metrics metrics = MetricsThreadLocal.get();

        final Collection<Job> commitJobs = Sets.newHashSetWithExpectedSize(changeProcessors.size());

        for (final ICDOChangeProcessor processor : changeProcessors) {
            commitJobs.add(new Job("Committing " + processor.getName()) {

                @Override
                protected IStatus run(final IProgressMonitor monitor) {
                    try {
                        MetricsThreadLocal.set(metrics);
                        // commit if anything had changed
                        LOGGER.info("Start ICDOChangeProcessor commit() {}", processor.getClass());
                        final IndexCommitChangeSet indexCommitChangeSet = processor.commit();

                        // log changes
                        logUserActivity(commitChangeSet, indexCommitChangeSet);

                        LOGGER.info("Finished ICDOChangeProcessor commit() {}", processor.getClass());

                        // Add to set of change processors that committed changes successfully
                        committedChangeProcessors.add(processor);
                        indexCommitChangeSets.add(indexCommitChangeSet);

                        return Status.OK_STATUS;
                    } catch (final Exception e) {
                        try {
                            processor.rollback();
                        } catch (final SnowowlServiceException ee) {
                            return new Status(IStatus.ERROR, DatastoreServerActivator.PLUGIN_ID,
                                    "Error while rolling back changes in " + processor.getName()
                                            + " for branch: " + branchPath,
                                    ee);
                        }
                        return new Status(IStatus.ERROR, DatastoreServerActivator.PLUGIN_ID,
                                "Error while committing changes with " + processor.getName() + " for branch: "
                                        + branchPath,
                                e);
                    } finally {
                        MetricsThreadLocal.release();
                    }
                }
            });
        }

        ForkJoinUtils.runJobsInParallelWithErrorHandling(commitJobs, null);
        // queue commit notification
        if (isCommitNotificationEnabled) {
            final IndexCommitChangeSet mergedChangeSet = merge(indexCommitChangeSets);
            getContext().getService(RepositoryManager.class).get(repositoryUuid)
                    .sendNotification(toCommitNotification(mergedChangeSet));
        }
    } catch (final Exception e) {
        caughtException = new SnowowlRuntimeException(
                "Error when committing change processors on branch: " + branchPath, e);
    } finally {
        cleanupAfterCommit(caughtException, committedChangeProcessors);
    }
}

From source file:com.googlesource.gerrit.plugins.reviewers.ChangeEventListener.java

private Set<Account> toAccounts(ReviewDb reviewDb, Set<String> in, Project.NameKey p, String uploaderEMail) {
    Set<Account> reviewers = Sets.newHashSetWithExpectedSize(in.size());
    GroupMembers groupMembers = null;
    for (String r : in) {
        try {
            Account account = accountResolver.find(reviewDb, r);
            if (account != null) {
                reviewers.add(account);
                continue;
            }
        } catch (OrmException e) {
            // If the account doesn't exist, find() will return null.  We only
            // get here if something went wrong accessing the database
            log.error("Failed to resolve account " + r, e);
            continue;
        }
        if (groupMembers == null) {
            groupMembers = groupMembersFactory.create(
                    identifiedUserFactory.create(Iterables.getOnlyElement(byEmailCache.get(uploaderEMail))));
        }
        try {
            reviewers.addAll(groupMembers.listAccounts(groupsCollection.get().parse(r).getGroupUUID(), p));
        } catch (UnprocessableEntityException | NoSuchGroupException e) {
            log.warn(String.format("Reviewer %s is neither an account nor a group", r));
        } catch (NoSuchProjectException e) {
            log.warn(String.format("Failed to list accounts for group %s and project %s", r, p));
        } catch (IOException | OrmException e) {
            log.warn(String.format("Failed to list accounts for group %s", r), e);
        }
    }
    return reviewers;
}