Example usage for java.lang.ThreadLocal.get()

A list of usage examples for java.lang.ThreadLocal.get()

Introduction

This page collects example usages of java.lang.ThreadLocal.get() drawn from open-source projects.

Prototype

public T get() 

Document

Returns the value in the current thread's copy of this thread-local variable. If the variable has no value for the current thread, it is first initialized to the value returned by an invocation of the initialValue() method.
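
A minimal, self-contained sketch of this behavior (the class and variable names below are illustrative, not from any of the projects cited here): get() returns whatever the current thread last stored with set(), or the initial value on first access, and each thread's copy is independent.

public class ThreadLocalGetDemo {

    // Each thread sees its own copy; withInitial supplies 0 on a thread's first get().
    private static final ThreadLocal<Integer> COUNTER = ThreadLocal.withInitial(() -> 0);

    public static void main(String[] args) throws InterruptedException {
        Runnable task = () -> {
            COUNTER.set(COUNTER.get() + 1); // mutates only this thread's copy
            System.out.println(Thread.currentThread().getName() + ": " + COUNTER.get());
        };
        Thread t1 = new Thread(task, "worker-1");
        Thread t2 = new Thread(task, "worker-2");
        t1.start();
        t2.start();
        t1.join();
        t2.join();
        System.out.println("main: " + COUNTER.get()); // prints 0; the workers never touched main's copy
    }
}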

Usage

From source file:com.netsteadfast.greenstep.bsc.util.BscReportSupportUtils.java

public static void loadExpression(ThreadLocal<SysExpressionVO> exprThreadLocal, String exprId)
        throws ServiceException, Exception {
    if (exprThreadLocal.get() == null) {
        SysExpressionVO sysExpression = new SysExpressionVO();
        sysExpression.setExprId(exprId);
        DefaultResult<SysExpressionVO> result = sysExpressionService.findByUkCacheable(sysExpression);
        if (result.getValue() != null) {
            sysExpression = result.getValue();
            exprThreadLocal.set(sysExpression);
        }
    }
}
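
This example lazily caches one SysExpressionVO per thread. A generic sketch of the same pattern (the ThreadLocalCache class below is hypothetical, not part of the project): load on the first get(), and call remove() when the unit of work ends so pooled threads do not carry stale values into later tasks.

import java.util.function.Supplier;

public class ThreadLocalCache<T> {

    private final ThreadLocal<T> cache = new ThreadLocal<>();

    // Returns this thread's cached value, loading it on first access.
    public T getOrLoad(Supplier<T> loader) {
        T value = cache.get(); // null until this thread has stored something
        if (value == null) {
            value = loader.get();
            cache.set(value);
        }
        return value;
    }

    // Important on thread pools: clear the slot so the next task starts fresh.
    public void clear() {
        cache.remove();
    }
}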

From source file:org.apache.sysml.runtime.matrix.data.LibMatrixNative.java

private static FloatBuffer toFloatBuffer(double[] input, ThreadLocal<FloatBuffer> buff, boolean copy) {
    //maintain thread-local buffer (resized on demand)
    FloatBuffer ret = buff.get();
    if (ret == null || ret.capacity() < input.length) {
        ret = ByteBuffer.allocateDirect(4 * input.length).order(ByteOrder.nativeOrder()).asFloatBuffer();
        buff.set(ret);
    }
    //copy to direct byte buffer
    final FloatBuffer ret2 = ret;
    if (copy) {
        IntStream.range(0, input.length).parallel().forEach(i -> ret2.put(i, (float) input[i]));
    }
    return ret2;
}

From source file:com.kylinolap.dict.DateStrDictionary.java

static SimpleDateFormat getDateFormat(String datePattern) {
    ThreadLocal<SimpleDateFormat> formatThreadLocal = threadLocalMap.get(datePattern);
    if (formatThreadLocal == null) {
        threadLocalMap.put(datePattern, formatThreadLocal = new ThreadLocal<SimpleDateFormat>());
    }
    SimpleDateFormat format = formatThreadLocal.get();
    if (format == null) {
        format = new SimpleDateFormat(datePattern);
        format.setTimeZone(TimeZone.getTimeZone("GMT")); // NOTE: must be GMT to calculate epoch dates correctly
        formatThreadLocal.set(format);
    }
    return format;
}
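
SimpleDateFormat is not thread-safe, which is why this code keeps one instance per pattern per thread. On Java 8+ the same intent can be written more compactly; a sketch (imports of ConcurrentMap and ConcurrentHashMap from java.util.concurrent are assumed, and the ConcurrentMap also closes the race in the original's unsynchronized check-then-put on threadLocalMap):

private static final ConcurrentMap<String, ThreadLocal<SimpleDateFormat>> FORMATS = new ConcurrentHashMap<>();

static SimpleDateFormat getDateFormat(String datePattern) {
    // computeIfAbsent registers each pattern atomically; withInitial builds one format per thread
    return FORMATS.computeIfAbsent(datePattern, pattern -> ThreadLocal.withInitial(() -> {
        SimpleDateFormat format = new SimpleDateFormat(pattern);
        format.setTimeZone(TimeZone.getTimeZone("GMT")); // GMT, as above, so epoch dates compute correctly
        return format;
    })).get();
}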

From source file:com.strategicgains.docussandra.controller.perf.remote.mongo.MongoLoader.java

public static void loadMongoData(MongoClientURI uri, final int NUM_WORKERS, Database database,
        final int numDocs, final PerfTestParent clazz) {
    logger.info("------------Loading Data into: " + database.name() + " with MONGO!------------");
    try {
        try {
            MongoClient mongoClient = new MongoClient(uri);
            mongoClient.setWriteConcern(WriteConcern.MAJORITY);
            DB db = mongoClient.getDB(database.name());
            final DBCollection coll = db.getCollection(database.name());
            ArrayList<Thread> workers = new ArrayList<>(NUM_WORKERS + 1);
            int docsPerWorker = numDocs / NUM_WORKERS;
            try {
                List<Document> docs = clazz.getDocumentsFromFS();
                ArrayList<List<Document>> documentQueues = new ArrayList<>(NUM_WORKERS + 1);
                int numDocsAssigned = 0;
                while (numDocsAssigned < numDocs) {
                    int start = numDocsAssigned;
                    // subList's end index is exclusive, so cap at numDocs to keep the last document
                    int end = Math.min(numDocsAssigned + docsPerWorker, numDocs);
                    documentQueues.add(new ArrayList<>(docs.subList(start, end)));
                    numDocsAssigned = end;
                }
                for (final List<Document> queue : documentQueues) {
                    workers.add(new Thread() {
                        @Override
                        public void run() {
                            for (Document d : queue) {
                                DBObject o = (DBObject) JSON.parse(d.object());
                                coll.save(o);
                            }
                            logger.info("Thread " + Thread.currentThread().getName() + " is done. It processed "
                                    + queue.size() + " documents.");
                        }
                    });
                }
            } catch (UnsupportedOperationException e) { // we can't read everything in at once
                // all we need to do in this block is find a way to set "workers"
                for (int i = 0; i < NUM_WORKERS; i++) {
                    workers.add(new Thread() {
                        // pick a random chunk size so the workers don't all hit the FS at once and bottleneck
                        private final int chunk = (int) (Math.random() * 100) + 150;

                        @Override
                        public void run() {
                            // NOTE: a plain local int would suffice; this counter never leaves the thread
                            ThreadLocal<Integer> counter = new ThreadLocal<>();
                            counter.set(0); // autoboxing; new Integer(0) is deprecated
                            try {
                                List<Document> docs = clazz.getDocumentsFromFS(chunk);//grab a handful of documents
                                while (docs.size() > 0) {
                                    for (Document d : docs)//process the documents we grabbed
                                    {
                                        DBObject o = (DBObject) JSON.parse(d.object());
                                        coll.save(o);
                                        counter.set(counter.get() + 1);
                                    }
                                    docs = clazz.getDocumentsFromFS(chunk);//grab another handful of documents
                                }
                                logger.info("Thread " + Thread.currentThread().getName()
                                        + " is done. It processed " + counter.get() + " documents.");
                            } catch (IOException | ParseException e) {
                                logger.error("Couldn't read from document", e);
                            }
                        }
                    });
                }
            }

            long start = new Date().getTime();
            //start your threads!
            for (Thread t : workers) {
                t.start();
            }
            logger.info("All threads started, waiting for completion.");
            boolean allDone = false;
            boolean first = true;
            while (!allDone || first) {
                first = false;
                boolean done = true;
                for (Thread t : workers) {
                    if (t.isAlive()) {
                        done = false;
                        logger.info("Thread " + t.getName() + " is still running.");
                        break;
                    }
                }
                if (done) {
                    allDone = true;
                } else {
                    logger.info("We still have workers running...");
                    try {
                        Thread.sleep(10000);
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt(); // restore the interrupt status instead of swallowing it
                    }
                }
            }
            long end = new Date().getTime();
            long milliseconds = end - start;
            double seconds = (double) milliseconds / 1000d;
            output.info("Done loading data using: " + NUM_WORKERS + " workers. Took: " + seconds + " seconds");
            double tpms = (double) numDocs / (double) milliseconds;
            double tps = tpms * 1000;
            double transactionTime = (double) milliseconds / (double) numDocs;
            output.info(database.name() + " Mongo Average Transactions Per Second: " + tps);
            output.info(
                    database.name() + " Mongo Average Transaction Time (in milliseconds): " + transactionTime);

        } catch (UnknownHostException e) {
            logger.error("Couldn't connect to Mongo Server", e);
        }
    } catch (IOException | ParseException e) {
        logger.error("Couldn't read data.", e);
    }
}
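
Two notes on this example. The ThreadLocal<Integer> counter inside run() is created, read, and written by a single thread, so a plain local int would behave identically; ThreadLocal earns its keep only when static or shared state must be partitioned per thread. Separately, the poll-and-sleep completion loop can be replaced with Thread.join(), which blocks until a worker terminates; a sketch:

// Sketch: join() waits for each worker to finish, replacing the polling loop above.
for (Thread t : workers) {
    try {
        t.join();
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt(); // preserve the interrupt and stop waiting
        break;
    }
}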

From source file:org.apache.ode.dao.jpa.hibernate.BpelDAOConnectionFactoryImpl.java

public BpelDAOConnection getConnection() {
    final ThreadLocal<BpelDAOConnectionImpl> currentConnection = BpelDAOConnectionImpl.getThreadLocal();

    BpelDAOConnectionImpl conn = currentConnection.get();
    if (conn != null && HibernateUtil.isOpen(conn)) {
        return conn;
    } else {
        EntityManager em = _emf.createEntityManager();
        conn = new BpelDAOConnectionImpl(em, _txm, _operator);
        currentConnection.set(conn);
        return conn;
    }
}

From source file:org.apache.ode.dao.jpa.hibernate.ConfStoreDAOConnectionFactoryImpl.java

public ConfStoreDAOConnection getConnection() {
    final ThreadLocal<ConfStoreDAOConnectionImpl> currentConnection = ConfStoreDAOConnectionImpl
            .getThreadLocal();

    ConfStoreDAOConnectionImpl conn = currentConnection.get();
    if (conn != null && HibernateUtil.isOpen(conn)) {
        return conn;
    } else {
        EntityManager em = _emf.createEntityManager();
        conn = new ConfStoreDAOConnectionImpl(em, _txm, _operator);
        currentConnection.set(conn);
        return conn;
    }
}

From source file:org.apache.ode.dao.jpa.hibernate.SchedulerDAOConnectionFactoryImpl.java

public SchedulerDAOConnection getConnection() {
    final ThreadLocal<SchedulerDAOConnectionImpl> currentConnection = SchedulerDAOConnectionImpl
            .getThreadLocal();

    SchedulerDAOConnectionImpl conn = currentConnection.get();
    if (conn != null && HibernateUtil.isOpen(conn)) {
        return conn;
    } else {
        EntityManager em = _emf.createEntityManager();
        conn = new SchedulerDAOConnectionImpl(em, _txm, _operator);
        currentConnection.set(conn);
        return conn;
    }
}
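
The three ODE factories above share one pattern: each *ConnectionImpl class exposes a static ThreadLocal through getThreadLocal(), and the factory either reuses the calling thread's open connection or creates one and caches it there. A hypothetical sketch of the holder side (the real fields live in the ODE *ConnectionImpl classes, which are not shown here):

public class DAOConnectionHolder {

    // One slot per thread; get() yields null until the factory stores a connection.
    private static final ThreadLocal<DAOConnectionHolder> CURRENT = new ThreadLocal<>();

    public static ThreadLocal<DAOConnectionHolder> getThreadLocal() {
        return CURRENT;
    }
}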

From source file:org.codice.alliance.nsili.common.ResultDAGConverter.java

public static DAG convertResult(Result result, ORB orb, POA poa, List<String> resultAttributes,
        Map<String, List<String>> mandatoryAttributes) throws DagParsingException {
    Double distanceInMeters = result.getDistanceInMeters();
    Double resultScore = result.getRelevanceScore();
    Metacard metacard = result.getMetacard();

    DAG dag = new DAG();
    DirectedAcyclicGraph<Node, Edge> graph = new DirectedAcyclicGraph<>(Edge.class);

    ProductImpl productImpl = new ProductImpl();

    String id = result.getMetacard().getId();

    if (!CorbaUtils.isIdActive(poa, id.getBytes(Charset.forName(ENCODING)))) {
        try {
            poa.activate_object_with_id(id.getBytes(Charset.forName(ENCODING)), productImpl);
        } catch (ServantAlreadyActive | ObjectAlreadyActive | WrongPolicy e) {
            LOGGER.info("Convert DAG : Unable to activate product impl object ({}): {}",
                    result.getMetacard().getId(), e.getLocalizedMessage());
        }
    }

    org.omg.CORBA.Object obj = poa.create_reference_with_id(id.getBytes(Charset.forName(ENCODING)),
            ProductHelper.id());
    Product product = ProductHelper.narrow(obj);

    Node productNode = createRootNode(orb);
    String attributeName = NsiliConstants.NSIL_PRODUCT;

    Any productAny = orb.create_any();
    ProductHelper.insert(productAny, product);
    productNode.value = productAny;

    graph.addVertex(productNode);

    List<String> addedAttributes = new ArrayList<>();
    addedAttributes.addAll(addCardNodeWithAttributes(graph, productNode, metacard, orb, attributeName + ":",
            resultAttributes));
    addedAttributes.addAll(addFileNodeWithAttributes(graph, productNode, metacard, orb, attributeName + ":",
            resultAttributes));
    addedAttributes.addAll(addSecurityNodeWithAttributes(graph, productNode, metacard, orb, attributeName + ":",
            resultAttributes));
    addedAttributes.addAll(addMetadataSecurityNodeWithAttributes(graph, productNode, metacard, orb,
            attributeName + ":", resultAttributes));
    addedAttributes.addAll(addParts(graph, productNode, metacard, orb, attributeName + ":", resultAttributes));

    if (metacard.getThumbnail() != null && metacard.getThumbnail().length > 0) {
        addedAttributes.addAll(addThumbnailRelatedFile(graph, productNode, metacard, orb, attributeName + ":",
                resultAttributes));
    }

    if (mandatoryAttributes != null && !mandatoryAttributes.isEmpty()) {
        final ThreadLocal<Boolean> dataIsValid = new ThreadLocal<>();
        dataIsValid.set(true);
        Map<String, List<String>> addedAttrMap = getAttrMap(addedAttributes);
        addedAttrMap.entrySet().stream().forEach(entry -> dataIsValid.set(dataIsValid.get()
                && processEntry(entry.getKey(), mandatoryAttributes.get(entry.getKey()), entry.getValue())));

        if (!dataIsValid.get()) {
            throw new DagParsingException("One or more mandatory attributes is missing on outgoing data");
        }
    }

    graph.addVertex(productNode);

    NsiliCommonUtils.setUCOEdgeIds(graph);
    NsiliCommonUtils.setUCOEdges(productNode, graph);
    dag.edges = NsiliCommonUtils.getEdgeArrayFromGraph(graph);
    dag.nodes = NsiliCommonUtils.getNodeArrayFromGraph(graph);

    return dag;
}
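
The ThreadLocal<Boolean> in the mandatory-attribute check works because the stream is sequential, so every get() and set() happens on the calling thread. Were the stream made parallel, each worker thread would see its own flag and the final get() could miss failures. A sketch of a drop-in alternative that is safe either way, using AtomicBoolean (import from java.util.concurrent.atomic assumed):

AtomicBoolean dataIsValid = new AtomicBoolean(true);
Map<String, List<String>> addedAttrMap = getAttrMap(addedAttributes);
addedAttrMap.forEach((key, values) -> {
    if (!processEntry(key, mandatoryAttributes.get(key), values)) {
        dataIsValid.set(false); // safe even under parallel execution
    }
});
if (!dataIsValid.get()) {
    throw new DagParsingException("One or more mandatory attributes is missing on outgoing data");
}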

From source file:net.netheos.pcsapi.oauth.PasswordSessionManager.java

private synchronized HttpContext getHttpContext(HttpHost host) {
    ThreadLocal<HttpContext> tlContext = cache.get(host);
    if (tlContext == null) {
        tlContext = new ThreadLocal<HttpContext>();
        cache.put(host, tlContext);
    }
    HttpContext context = tlContext.get();
    if (context == null) {
        AuthScope scope = new AuthScope(host.getHostName(), host.getPort());
        CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
        credentialsProvider.setCredentials(scope, usernamePasswordCredentials);

        context = new BasicHttpContext();
        context.setAttribute(ClientContext.CREDS_PROVIDER, credentialsProvider);
        tlContext.set(context);
    }
    return context;
}

From source file:net.sf.jasperreports.engine.xml.BaseSaxParserFactory.java

protected void setGrammarPoolProperty(SAXParser parser, String poolClassName) {
    try {
        Object cacheKey = getGrammarPoolCacheKey();

        // we're using thread local caches to avoid thread safety problems
        ThreadLocal<ReferenceMap<Object, Object>> grammarPoolCache = getGrammarPoolCache();
        ReferenceMap<Object, Object> cacheMap = grammarPoolCache.get();
        if (cacheMap == null) {
            cacheMap = new ReferenceMap<Object, Object>(ReferenceMap.ReferenceStrength.WEAK,
                    ReferenceMap.ReferenceStrength.SOFT);
            grammarPoolCache.set(cacheMap);
        }

        Object grammarPool = cacheMap.get(cacheKey);
        if (grammarPool == null) {
            if (log.isDebugEnabled()) {
                log.debug("Instantiating grammar pool of type " + poolClassName + " for cache key " + cacheKey);
            }

            grammarPool = ClassUtils.instantiateClass(poolClassName, Object.class);
            cacheMap.put(cacheKey, grammarPool);
        }

        parser.setProperty(XERCES_PARSER_PROPERTY_GRAMMAR_POOL, grammarPool);
    } catch (Exception e) {
        if (log.isDebugEnabled()) {
            log.debug("Error setting Xerces grammar pool of type " + poolClassName, e);
        }
    }
}
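
On Java 8+ the null-check-and-set around the thread-local cache can be folded into ThreadLocal.withInitial; a sketch that keeps the Apache Commons ReferenceMap used above (the getCachedGrammarPool method name is illustrative):

private static final ThreadLocal<ReferenceMap<Object, Object>> GRAMMAR_POOL_CACHE =
        ThreadLocal.withInitial(() -> new ReferenceMap<>(ReferenceMap.ReferenceStrength.WEAK,
                ReferenceMap.ReferenceStrength.SOFT));

// Weak keys and soft values let cached grammar pools be reclaimed under memory pressure.
protected Object getCachedGrammarPool(Object cacheKey, String poolClassName) {
    ReferenceMap<Object, Object> cacheMap = GRAMMAR_POOL_CACHE.get(); // never null with withInitial
    Object grammarPool = cacheMap.get(cacheKey);
    if (grammarPool == null) {
        grammarPool = ClassUtils.instantiateClass(poolClassName, Object.class);
        cacheMap.put(cacheKey, grammarPool);
    }
    return grammarPool;
}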