Example usage for java.lang.ThreadLocal.set

Introduction

This page collects usage examples for java.lang.ThreadLocal.set, gathered from open source projects.

Prototype

public void set(T value) 

Document

Sets the current thread's copy of this thread-local variable to the specified value.
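
A minimal, self-contained illustration (not taken from any of the projects below): each thread that calls set stores its own copy of the value, invisible to other threads.

public class ThreadLocalSetDemo {
    private static final ThreadLocal<String> CONTEXT = new ThreadLocal<>();

    public static void main(String[] args) throws InterruptedException {
        Runnable task = () -> {
            // Each thread sets its own copy; other threads never see it.
            CONTEXT.set("value of " + Thread.currentThread().getName());
            System.out.println(CONTEXT.get());
            CONTEXT.remove(); // good hygiene, especially on pooled threads
        };
        Thread t1 = new Thread(task, "thread-1");
        Thread t2 = new Thread(task, "thread-2");
        t1.start();
        t2.start();
        t1.join();
        t2.join();
    }
}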

Usage

From source file:com.kylinolap.dict.DateStrDictionary.java

static SimpleDateFormat getDateFormat(String datePattern) {
    ThreadLocal<SimpleDateFormat> formatThreadLocal = threadLocalMap.get(datePattern);
    if (formatThreadLocal == null) {
        threadLocalMap.put(datePattern, formatThreadLocal = new ThreadLocal<SimpleDateFormat>());
    }
    SimpleDateFormat format = formatThreadLocal.get();
    if (format == null) {
        format = new SimpleDateFormat(datePattern);
        format.setTimeZone(TimeZone.getTimeZone("GMT")); // NOTE: this must be GMT to calculate epoch date correctly
        formatThreadLocal.set(format);
    }
    return format;
}

From source file:org.codice.alliance.nsili.common.ResultDAGConverter.java

private static boolean processEntry(String entryName, List<String> requiredAttrs, List<String> parsedAttrs) {
    final ThreadLocal<Boolean> dataIsValid = new ThreadLocal<>();
    dataIsValid.set(true);

    if (requiredAttrs != null) {
        requiredAttrs.stream().filter(requiredAttr -> !parsedAttrs.contains(requiredAttr))
                .forEach(missingAttr -> {
                    dataIsValid.set(false);
                    LOGGER.warn("Node: {} is missing attribute: {}", entryName, missingAttr);
                });
    }

    return dataIsValid.get();
}
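
Note that the ThreadLocal here shares nothing across threads; it exists only so the lambda can mutate a flag (local variables captured by a lambda must be effectively final). A sketch of an equivalent method without it, assuming the same LOGGER and a java.util.stream.Collectors import (not the project's actual code):

private static boolean processEntry(String entryName, List<String> requiredAttrs, List<String> parsedAttrs) {
    if (requiredAttrs == null) {
        return true;
    }
    // Collect the missing attributes, log each one, and report validity directly.
    List<String> missing = requiredAttrs.stream()
            .filter(requiredAttr -> !parsedAttrs.contains(requiredAttr))
            .collect(Collectors.toList());
    missing.forEach(missingAttr -> LOGGER.warn("Node: {} is missing attribute: {}", entryName, missingAttr));
    return missing.isEmpty();
}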

From source file:org.codice.alliance.nsili.common.ResultDAGConverter.java

public static DAG convertResult(Result result, ORB orb, POA poa, List<String> resultAttributes,
        Map<String, List<String>> mandatoryAttributes) throws DagParsingException {
    Double distanceInMeters = result.getDistanceInMeters();
    Double resultScore = result.getRelevanceScore();
    Metacard metacard = result.getMetacard();

    DAG dag = new DAG();
    DirectedAcyclicGraph<Node, Edge> graph = new DirectedAcyclicGraph<>(Edge.class);

    ProductImpl productImpl = new ProductImpl();

    String id = result.getMetacard().getId();

    if (!CorbaUtils.isIdActive(poa, id.getBytes(Charset.forName(ENCODING)))) {
        try {
            poa.activate_object_with_id(id.getBytes(Charset.forName(ENCODING)), productImpl);
        } catch (ServantAlreadyActive | ObjectAlreadyActive | WrongPolicy e) {
            LOGGER.info("Convert DAG : Unable to activate product impl object ({}): {}",
                    result.getMetacard().getId(), e.getLocalizedMessage());
        }
    }

    org.omg.CORBA.Object obj = poa.create_reference_with_id(id.getBytes(Charset.forName(ENCODING)),
            ProductHelper.id());
    Product product = ProductHelper.narrow(obj);

    Node productNode = createRootNode(orb);
    String attributeName = NsiliConstants.NSIL_PRODUCT;

    Any productAny = orb.create_any();
    ProductHelper.insert(productAny, product);
    productNode.value = productAny;

    graph.addVertex(productNode);

    List<String> addedAttributes = new ArrayList<>();
    addedAttributes.addAll(addCardNodeWithAttributes(graph, productNode, metacard, orb, attributeName + ":",
            resultAttributes));
    addedAttributes.addAll(addFileNodeWithAttributes(graph, productNode, metacard, orb, attributeName + ":",
            resultAttributes));
    addedAttributes.addAll(addSecurityNodeWithAttributes(graph, productNode, metacard, orb, attributeName + ":",
            resultAttributes));
    addedAttributes.addAll(addMetadataSecurityNodeWithAttributes(graph, productNode, metacard, orb,
            attributeName + ":", resultAttributes));
    addedAttributes.addAll(addParts(graph, productNode, metacard, orb, attributeName + ":", resultAttributes));

    if (metacard.getThumbnail() != null && metacard.getThumbnail().length > 0) {
        addedAttributes.addAll(addThumbnailRelatedFile(graph, productNode, metacard, orb, attributeName + ":",
                resultAttributes));
    }

    if (mandatoryAttributes != null && !mandatoryAttributes.isEmpty()) {
        final ThreadLocal<Boolean> dataIsValid = new ThreadLocal<>();
        dataIsValid.set(true);
        Map<String, List<String>> addedAttrMap = getAttrMap(addedAttributes);
        addedAttrMap.entrySet().stream().forEach(entry -> dataIsValid.set(dataIsValid.get()
                && processEntry(entry.getKey(), mandatoryAttributes.get(entry.getKey()), entry.getValue())));

        if (!dataIsValid.get()) {
            throw new DagParsingException("One or more mandatory attributes are missing on outgoing data");
        }
    }

    graph.addVertex(productNode); // redundant: productNode was added above, and addVertex is a no-op for an existing vertex

    NsiliCommonUtils.setUCOEdgeIds(graph);
    NsiliCommonUtils.setUCOEdges(productNode, graph);
    dag.edges = NsiliCommonUtils.getEdgeArrayFromGraph(graph);
    dag.nodes = NsiliCommonUtils.getNodeArrayFromGraph(graph);

    return dag;
}
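
The mandatory-attribute check above accumulates a boolean through a ThreadLocal so the forEach lambda can update it. Stream.allMatch can express the same decision directly; one caveat is that allMatch short-circuits on the first failure, whereas the original forEach runs processEntry (and thus logs missing attributes) for every entry. A sketch, assuming the same addedAttrMap, mandatoryAttributes, and processEntry as in this file:

boolean dataIsValid = addedAttrMap.entrySet().stream()
        .allMatch(entry -> processEntry(entry.getKey(), mandatoryAttributes.get(entry.getKey()),
                entry.getValue()));
if (!dataIsValid) {
    throw new DagParsingException("One or more mandatory attributes are missing on outgoing data");
}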

From source file:com.strategicgains.docussandra.controller.perf.remote.parent.PerfTestParent.java

public void loadData() throws IOException, ParseException, InterruptedException {
    logger.info("------------Loading Data into: " + this.getDb().name() + " with Docussandra!------------");
    ArrayList<Thread> workers = new ArrayList<>(NUM_WORKERS + 1);
    int numDocs = getNumDocuments();
    int docsPerWorker = numDocs / NUM_WORKERS;
    try {
        List<Document> docs = getDocumentsFromFS();
        ArrayList<List<Document>> documentQueues = new ArrayList<>(NUM_WORKERS + 1);
        int numDocsAssigned = 0;
        while ((numDocsAssigned + 1) < numDocs) {
            int start = numDocsAssigned;
            int end = numDocsAssigned + docsPerWorker;
            if (end > numDocs) {
                end = numDocs - 1;
            }
            documentQueues.add(new ArrayList<>(docs.subList(start, end)));
            numDocsAssigned = end;
        }
        for (final List<Document> queue : documentQueues) {
            workers.add(new Thread() {
                @Override
                public void run() {
                    for (Document d : queue) {
                        //logger.debug("Processing document: " + d.toString());
                        postDocument(getDb(), getTb(), d);
                    }
                    logger.info("Thread " + Thread.currentThread().getName() + " is done.");
                }
            });
        }
    } catch (UnsupportedOperationException e) { // we can't read everything in at once
        // all we need to do in this block is find a way to set "workers"
        for (int i = 0; i < NUM_WORKERS; i++) {
            workers.add(new Thread() {
                private final int chunk = (int) (Math.random() * 100) + 150; // pick a random chunk size so we are not all going back to the FS at the same time, potentially causing a bottleneck

                @Override
                public void run() {
                    ThreadLocal<Integer> counter = new ThreadLocal<>();
                    counter.set(0);
                    try {
                        List<Document> docs = getDocumentsFromFS(chunk); // grab a handful of documents
                        while (docs.size() > 0) {
                            for (Document d : docs) { // process the documents we grabbed
                                //logger.debug("Processing document: " + d.toString());
                                postDocument(getDb(), getTb(), d); // post them up
                                counter.set(counter.get() + 1);
                            }
                            docs = getDocumentsFromFS(chunk); // grab another handful of documents
                        }
                        logger.info("Thread " + Thread.currentThread().getName() + " is done. It processed "
                                + counter.get() + " documents.");
                    } catch (IOException | ParseException e) {
                        logger.error("Couldn't read from document", e);
                    }
                }
            });
        }
    }

    //long start = new Date().getTime();
    StopWatch sw = new StopWatch();
    sw.start();
    //start your threads!
    for (Thread t : workers) {
        t.start();
    }
    logger.info("All threads started, waiting for completion.");
    boolean allDone = false;
    boolean first = true;
    while (!allDone || first) {
        first = false;
        boolean done = true;
        for (Thread t : workers) {
            if (t.isAlive()) {
                done = false;
                logger.info("Thread " + t.getName() + " is still running.");
                break;
            }
        }
        if (done) {
            allDone = true;
            sw.stop();
        } else {
            logger.info("We still have workers running...");
            Thread.sleep(5000);
        }
    }

    long milliseconds = sw.getTime();
    double seconds = (double) milliseconds / 1000d;
    output.info("Doc: Done loading data using " + NUM_WORKERS + " workers and URL: " + BASE_URI + ". Took: "
            + seconds + " seconds");
    double tpms = (double) numDocs / (double) milliseconds;
    double tps = tpms * 1000;
    double transactionTime = (double) milliseconds / (double) numDocs;
    output.info(this.getDb().name() + " Doc: Average Transactions Per Second: " + tps);
    output.info(this.getDb().name() + " Doc: Average Transaction Time (in milliseconds): " + transactionTime);
    Thread.sleep(100000); // sleep a bit to let the DB digest that before trying anything else
}
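
Aside: the counter ThreadLocal in the second worker lives entirely inside one thread's run() method, so the value is already thread-confined; a plain local variable carries the same per-thread count without boxing. A self-contained sketch (hypothetical worker, not the Docussandra code):

public class LocalCounterDemo {
    public static void main(String[] args) {
        Runnable worker = () -> {
            int processed = 0; // thread-confined local; no ThreadLocal needed
            for (int i = 0; i < 5; i++) {
                processed++; // stand-in for postDocument(...)
            }
            System.out.println(Thread.currentThread().getName() + " is done. It processed " + processed
                    + " documents.");
        };
        new Thread(worker, "worker-1").start();
        new Thread(worker, "worker-2").start();
    }
}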

From source file:com.qrmedia.commons.persistence.hibernate.clone.HibernateEntityGraphClonerTest.java

@SuppressWarnings("unchecked")
@Test
public void clone_entities() throws IllegalAccessException {
    final StubHibernateEntity entity1 = new StubHibernateEntity();

    String property = "007";
    final StubHibernateEntity relatedEntity = new SimplePropertyEqualStubHibernateEntity(property);
    entity1.setNonSimpleBeanProperty(relatedEntity);

    Set<StubHibernateEntity> nonSimpleCollectionBeanProperty = new HashSet<StubHibernateEntity>();

    // reuse relatedEntity to check if its clone is used in both places
    nonSimpleCollectionBeanProperty.add(relatedEntity);
    entity1.setNonSimpleCollectionBeanProperty(nonSimpleCollectionBeanProperty);

    // the first call to the bean cloner creates a clone, adds a new entity and some commands
    final GraphWiringCommand graphWiringCommand1 = createMock(GraphWiringCommand.class);
    final GraphPostProcessingCommand graphPostProcessingCommand = createMock(GraphPostProcessingCommand.class);
    final StubHibernateEntity clone1 = new StubHibernateEntity();
    entityBeanCloner.visitNode(eq(new EntityPreserveIdFlagPair(entity1, false)), same(entityGraphCloner),
            (IdentityHashMap<Object, Object>) anyObject());
    expectLastCall()
            .andAnswer(new HibernateEntityBeanClonerActions(entity1, clone1, Arrays.asList(relatedEntity),
                    Arrays.asList(graphWiringCommand1), Arrays.asList(graphPostProcessingCommand)));

    // note that entity2 is equal to (but not identical to) relatedEntity!
    final GraphWiringCommand graphWiringCommand2 = createMock(GraphWiringCommand.class);
    final StubHibernateEntity entity2 = new SimplePropertyEqualStubHibernateEntity(property);
    entity2.setNonSimpleBeanProperty(entity1);
    final StubHibernateEntity clone2 = new SimplePropertyEqualStubHibernateEntity(property);
    entityBeanCloner.visitNode(eq(new EntityPreserveIdFlagPair(entity2, false)), same(entityGraphCloner),
            (IdentityHashMap<Object, Object>) anyObject());
    expectLastCall().andAnswer(new HibernateEntityBeanClonerActions(entity2, clone2, null,
            Arrays.asList(graphWiringCommand2), null));

    final StubHibernateEntity relatedEntityClone = new SimplePropertyEqualStubHibernateEntity(property);
    entityBeanCloner.visitNode(eq(new EntityPreserveIdFlagPair(relatedEntity, false)), same(entityGraphCloner),
            (IdentityHashMap<Object, Object>) anyObject());
    expectLastCall().andAnswer(new HibernateEntityBeanClonerActions(relatedEntity, relatedEntityClone));

    // use a mutable flag so the mocks can track the order of calls
    final ThreadLocal<Integer> numGraphWiringCommandExecuted = new ThreadLocal<Integer>();
    numGraphWiringCommandExecuted.set(0);

    // the entity graph cloner should call the commands in the order they were added

    graphWiringCommand1.forEntities();
    expectLastCall().andReturn(Arrays.asList(entity1));
    graphWiringCommand1.execute(MapUtils.toMap(new IdentityHashMap<Object, Object>(), entity1, clone1));
    expectLastCall().andAnswer(new NumGraphWiringCommandsExecutedVerifier(numGraphWiringCommandExecuted, 0));

    graphWiringCommand2.forEntities();
    expectLastCall().andReturn(Arrays.asList(relatedEntity));
    graphWiringCommand2
            .execute(MapUtils.toMap(new IdentityHashMap<Object, Object>(), relatedEntity, relatedEntityClone));
    expectLastCall().andAnswer(new NumGraphWiringCommandsExecutedVerifier(numGraphWiringCommandExecuted, 1));

    // this *must* be called after all the wiring commands have been completed
    graphPostProcessingCommand.execute();
    expectLastCall().andAnswer(new IAnswer<Object>() {

        public Object answer() throws Throwable {

            if (numGraphWiringCommandExecuted.get() != 2) {
                fail("Graph post-processing command executed before wiring was complete.");
            }

            return null;
        }

    });

    replay(entityBeanCloner, graphWiringCommand1, graphWiringCommand2, graphPostProcessingCommand);

    Map<StubHibernateEntity, StubHibernateEntity> clones = entityGraphCloner
            .clone(Arrays.asList(entity1, entity2));
    assertEquals(MapUtils.<StubHibernateEntity, StubHibernateEntity>toMap(entity1, clone1, entity2, clone2),
            clones);

    verify(entityBeanCloner, graphWiringCommand1, graphWiringCommand2, graphPostProcessingCommand);

    // check that any internal state maintained during the cloning has been cleaned up
    assertTrue(ReflectionUtils.<List<?>>getValue(entityGraphCloner, "graphWiringCommands").isEmpty());
    assertTrue(ReflectionUtils.<List<?>>getValue(entityGraphCloner, "graphPostProcessingCommands").isEmpty());

    /*
     * The actual wiring of the objects is *not* checked because that is the function
     * of the command objects, *not* the entity graph cloner.
     * As such, this is not within the scope of a unit test.
     */
}
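
Aside: the ThreadLocal<Integer> in this test is used purely as a mutable holder that the mock answers can update, and everything runs on the test thread; an AtomicInteger is the more common idiom for that. A minimal sketch (hypothetical names, not the actual test):

import java.util.concurrent.atomic.AtomicInteger;

public class CounterIdiomDemo {
    public static void main(String[] args) {
        AtomicInteger numExecuted = new AtomicInteger(0);
        Runnable command = numExecuted::incrementAndGet; // stands in for a mock answer
        command.run();
        command.run();
        if (numExecuted.get() != 2) {
            throw new AssertionError("commands did not all run");
        }
        System.out.println("executed " + numExecuted.get() + " commands");
    }
}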

From source file:org.apache.ode.dao.jpa.hibernate.BpelDAOConnectionFactoryImpl.java

public BpelDAOConnection getConnection() {
    final ThreadLocal<BpelDAOConnectionImpl> currentConnection = BpelDAOConnectionImpl.getThreadLocal();

    BpelDAOConnectionImpl conn = currentConnection.get();
    if (conn != null && HibernateUtil.isOpen(conn)) {
        return conn;
    } else {
        EntityManager em = _emf.createEntityManager();
        conn = new BpelDAOConnectionImpl(em, _txm, _operator);
        currentConnection.set(conn);
        return conn;
    }
}

From source file:org.squale.jraf.provider.persistence.hibernate.HibernateFilter.java

/**
 * Pre-processing of an HTTP request.
 */
public void preProcess(ServletRequest request, ServletResponse response) throws IOException, ServletException {

    SessionImpl session = null;
    Iterator iterator = getProvidersMap().entrySet().iterator();
    Map.Entry entry = null;
    PersistenceProviderImpl persistenceProvider = null;
    String providerName = null;
    HttpServletRequest httpRequest = (HttpServletRequest) request;
    HttpSession httpSession = null;

    // for each provider
    while (iterator.hasNext()) {

        entry = (Map.Entry) iterator.next();
        providerName = (String) entry.getKey();
        persistenceProvider = (PersistenceProviderImpl) entry.getValue();

        if (log.isDebugEnabled()) {
            log.debug("providerName=" + providerName);
        }

        // thread-local session and long-session case
        if (persistenceProvider.isThreadLocalSession() && persistenceProvider.isLongSession()) {
            if (log.isDebugEnabled()) {
                log.debug("Cas thread local session et session longue...");
            }

            httpSession = httpRequest.getSession();

            if (log.isDebugEnabled()) {
                log.debug("Recuperation de la session...");
            }
            session = (SessionImpl) httpSession.getAttribute(SESSIONS_KEY + providerName);

            if (session != null) {
                if (log.isDebugEnabled()) {
                    log.debug("Session existante...");
                    log.debug("Mise de la session dans le thread local storage...");
                }

                ThreadLocal tl = new ThreadLocal();
                tl.set(session);
                persistenceProvider.setThreadLocal(tl);
            } else {
                if (log.isDebugEnabled()) {
                    log.debug("Session non existante...");
                    log.debug("Elle sera creee en cas d'appel...");
                }
            }

        }

    }

    // nothing to do by default

}

From source file:org.apache.ode.dao.jpa.hibernate.ConfStoreDAOConnectionFactoryImpl.java

public ConfStoreDAOConnection getConnection() {
    final ThreadLocal<ConfStoreDAOConnectionImpl> currentConnection = ConfStoreDAOConnectionImpl
            .getThreadLocal();

    ConfStoreDAOConnectionImpl conn = currentConnection.get();
    if (conn != null && HibernateUtil.isOpen(conn)) {
        return conn;
    } else {
        EntityManager em = _emf.createEntityManager();
        conn = new ConfStoreDAOConnectionImpl(em, _txm, _operator);
        currentConnection.set(conn);
        return conn;
    }
}

From source file:org.apache.ode.dao.jpa.hibernate.SchedulerDAOConnectionFactoryImpl.java

public SchedulerDAOConnection getConnection() {
    final ThreadLocal<SchedulerDAOConnectionImpl> currentConnection = SchedulerDAOConnectionImpl
            .getThreadLocal();

    SchedulerDAOConnectionImpl conn = currentConnection.get();
    if (conn != null && HibernateUtil.isOpen(conn)) {
        return conn;
    } else {
        EntityManager em = _emf.createEntityManager();
        conn = new SchedulerDAOConnectionImpl(em, _txm, _operator);
        currentConnection.set(conn);
        return conn;
    }
}
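
The three Apache ODE factories above follow one pattern: fetch the current thread's cached connection, reuse it if still open, otherwise create a fresh one and cache it with set. A generic sketch of that pattern, where Connection and its methods are stand-ins rather than the actual ODE API:

public final class PerThreadConnectionCache {

    // Hypothetical connection type; the real code uses the ODE DAO connection classes.
    public interface Connection {
        boolean isOpen();
    }

    private static final ThreadLocal<Connection> CURRENT = new ThreadLocal<>();

    static Connection getConnection() {
        Connection conn = CURRENT.get();
        if (conn != null && conn.isOpen()) {
            return conn;            // reuse this thread's live connection
        }
        conn = () -> true;          // stand-in for creating a real connection
        CURRENT.set(conn);          // cache it for this thread
        return conn;
    }
}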

From source file:net.sf.jasperreports.engine.xml.BaseSaxParserFactory.java

protected void setGrammarPoolProperty(SAXParser parser, String poolClassName) {
    try {
        Object cacheKey = getGrammarPoolCacheKey();

        // we're using thread local caches to avoid thread safety problems
        ThreadLocal<ReferenceMap<Object, Object>> grammarPoolCache = getGrammarPoolCache();
        ReferenceMap<Object, Object> cacheMap = grammarPoolCache.get();
        if (cacheMap == null) {
            cacheMap = new ReferenceMap<Object, Object>(ReferenceMap.ReferenceStrength.WEAK,
                    ReferenceMap.ReferenceStrength.SOFT);
            grammarPoolCache.set(cacheMap);
        }

        Object grammarPool = cacheMap.get(cacheKey);
        if (grammarPool == null) {
            if (log.isDebugEnabled()) {
                log.debug("Instantiating grammar pool of type " + poolClassName + " for cache key " + cacheKey);
            }

            grammarPool = ClassUtils.instantiateClass(poolClassName, Object.class);
            cacheMap.put(cacheKey, grammarPool);
        }

        parser.setProperty(XERCES_PARSER_PROPERTY_GRAMMAR_POOL, grammarPool);
    } catch (Exception e) {
        if (log.isDebugEnabled()) {
            log.debug("Error setting Xerces grammar pool of type " + poolClassName, e);
        }
    }
}